chenkun 1 month ago
parent
commit
94b621ea91

File diff suppressed because it is too large
+ 21266 - 8315
logs/lq-admin-app.log.1


File diff suppressed because it is too large
+ 17540 - 0
logs/lq-admin-app.log.5


+ 142 - 125
scripts/lq_db.sql

@@ -43,89 +43,92 @@ CREATE TABLE `t_basis_of_preparation`  (
   `created_at` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '记录创建时间',
   `updated_at` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '记录更新时间',
   `deleted` tinyint(1) UNSIGNED ZEROFILL NULL DEFAULT NULL COMMENT '逻辑删除',
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL COMMENT '关联文档ID',
   PRIMARY KEY (`id`) USING BTREE,
   INDEX `idx_standard_no`(`standard_no`) USING BTREE COMMENT '标准编号索引',
   INDEX `idx_chinese_name`(`file_name`(100)) USING BTREE COMMENT '中文名称索引',
   INDEX `idx_release_date`(`release_date`) USING BTREE COMMENT '发布日期索引',
   INDEX `idx_document_type`(`document_type`) USING BTREE COMMENT '标准类型索引',
-  INDEX `idx_professional_field`(`professional_field`) USING BTREE COMMENT '专业领域索引'
+  INDEX `idx_professional_field`(`professional_field`) USING BTREE COMMENT '专业领域索引',
+  INDEX `idx_basis_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_basis_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci COMMENT = '编制依据基本信息表' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
 -- Records of t_basis_of_preparation
 -- ----------------------------
-INSERT INTO `t_basis_of_preparation` VALUES ('1', '', '中华人民共和国水土保持法', NULL, '主席令第39号', NULL, '第十一届全国人民代表大会常务委员会', '2010-12-25', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('10', '', '粗直径钢丝绳', NULL, 'GB/T20067-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-09-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('100', '', '熔化焊用钢丝', NULL, 'GB/T14957-1994', NULL, '国家技术监督局', '1994-04-04', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('101', '', '起重机吊装工和指挥人员的培训', NULL, 'GB/T23721-2009', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2009-05-18', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('102', '', '中华人民共和国环境保护法', NULL, '主席令第9号(2014修订)', NULL, '全国人民代表大会常务委员会', '2014-04-24', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('103', '', '起重机设计规范', NULL, 'GB/T3811-2008', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2008-06-03', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('104', '', '生产安全事故报告和调查处理条例', NULL, '国务院令第493号', NULL, '国务院', '2007-04-09', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('105', '', '安全带', NULL, 'GB6095-2021', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2021-04-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('106', '', '公路桥涵设计通用规范', NULL, 'JTGD60-2015', NULL, '交通运输部', '2015-09-23', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('107', '', '埋弧焊用非合金钢及细晶粒钢实心焊丝、药芯焊丝和焊丝-焊剂组合', NULL, 'GB/T5293-2018', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2018-12-28', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('108', '', '公路桥涵施工技术规范', NULL, 'JTG/T3650-2020', NULL, '中华人民共和国交通运输部', '2020-04-28', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('109', '', '桥梁用结构钢', NULL, 'GB/T714-2015', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2015-04-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('11', '', '非合金钢及细晶粒钢药芯焊丝', NULL, 'GB/T10045-2018', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2018-06-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('110', '', '焊接材料质量管理规程', NULL, 'JB/T3223-2017', NULL, '工业和信息化部', '2017-11-07', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('111', '', '建筑工程施工现场标志设置技术规程', NULL, 'JGJ348-2014', NULL, '中华人民共和国住房和城乡建设部', '2014-01-09', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('112', '', '钢结构设计标准含条文说明', NULL, 'GB50017-2017', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2017-03-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('113', '', '厚度方向性能钢板', NULL, 'GB/T5313-2010', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2010-12-23', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('114', '', '钢丝绳通用技术条件', NULL, 'GB/T20118-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-07-12', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('115', '', '公路桥涵地基与基础设计规范', NULL, 'JTG3363-2019', NULL, '交通运输部', '2019-03-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('116', '', '公路水运工程临时用电技术规程上传系统', NULL, 'JT/T1499-2024', NULL, '交通运输部', '2024-01-04', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('117', '', '公路路基施工技术规范', NULL, 'JTG/T3610-2019', NULL, '交通运输部', '2019-03-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('118', '', '钢结构焊接规范', NULL, 'GB50661-2011', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2011-05-18', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('119', '', '公路工程预制梁架设施工标准化作业手册', NULL, NULL, NULL, '交通运输部', NULL, NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('12', '', '厚度方向性能钢板', NULL, 'GB/T5313-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-09-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('120', '', '中华人民共和国森林法', NULL, '主席令第39号(2019修订)', NULL, '全国人民代表大会常务委员会', '2019-12-28', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('121', '', '公路工程水泥及水泥混凝土试验规程', NULL, 'JTG3420-2020', NULL, '交通运输部', '2020-02-26', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('122', '', '建设工程安全生产管理条例2023', NULL, '国务院令第393号(2023修订)', NULL, '国务院', '2023-07-29', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('123', '', '钢结构工程施工质量验收标准', NULL, 'GB50205-2020', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2020-11-18', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('124', '', '公路工程施工现场安全防护技术要求', NULL, 'JT/T1508-2024', NULL, '交通运输部', '2024-01-04', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('125', '', '危险性较大的分部分项工程专项施工方案编制与管理指南', NULL, 'T/CECS20011-2022', NULL, '中国工程建设标准化协会', '2022-03-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('126', '', '起重机械安全规程', NULL, 'GB6067.5-2014', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2014-07-08', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('127', '', '建筑电气工程施工质量验收规范', NULL, 'GB50303-2015', NULL, '中华人民共和国住房和城乡建设部', '2015-09-10', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('128', '', '建筑施工高处作业安全技术规范', NULL, 'JGJ80-2016', NULL, '中华人民共和国住房和城乡建设部', '2016-01-22', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('129', '', '中华人民共和国安全生产法', NULL, '主席令第88号(2021修订)', NULL, '全国人民代表大会常务委员会', '2021-06-10', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('13', '', '建筑施工起重吊装工程安全技术规范', NULL, 'JGJ276-2012', NULL, '中华人民共和国住房和城乡建设部', '2012-08-23', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('130', '', '碳素结构钢', NULL, 'GB/T700-2006', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2006-11-01', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('131', '', '厚钢板超声检测方法', NULL, 'GB/T2970-2016', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2016-02-24', NULL, NULL, NULL, '国家标准', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('132', '', '中华人民共和国野生植物保护条例', NULL, '国务院令第204号(2024修订)', NULL, '国务院', '2024-07-20', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('133', '', '国务院关于进一步加强企业安全生产工作的通知', NULL, '国发〔2010〕23号', NULL, '国务院', '2010-07-19', NULL, NULL, NULL, '规范性文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('134', '', '建设工程施工现场供用电安全规范', NULL, 'GB50194-2014', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2014-12-01', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('135', '', '中华人民共和国道路交通安全法', NULL, '主席令第47号(2021修订)', NULL, '全国人民代表大会常务委员会', '2021-04-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('136', '', '生产经营单位生产安全事故应急预案编制导则', NULL, 'GB/T29639-2020', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2020-03-06', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('137', '', '起重设备安装工程施工及验收规范', NULL, 'GB50278-2010', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2010-09-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('138', '', '中华人民共和国野生动物保护法', NULL, '主席令第19号(2022修订)', NULL, '全国人民代表大会常务委员会', '2022-12-30', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('139', '', '架桥机通用技术条件', NULL, 'GB/T26470-2011', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2011-06-16', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('14', '', '劳动防护用品监督管理规定', NULL, NULL, NULL, '国家安全生产监督管理总局', '2005-07-22', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('140', '', '建筑施工安全检查标准', NULL, 'JGJ59-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-12-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('141', '', '中华人民共和国特种设备安全法', NULL, '主席令第4号(2021修订)', NULL, '全国人民代表大会常务委员会', '2013-06-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('142', '', '四川省安全生产条例', NULL, NULL, NULL, '四川省人民代表大会常务委员会', '2021-07-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('143', '', '市政架桥机安全使用技术规程', NULL, 'JGJ266-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-02-16', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('144', '', '公路桥梁钢结构防腐涂装技术条件', NULL, 'JT/T722-2023', NULL, '交通运输部', '2023-03-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('145', '', '中华人民共和国土地管理法实施条例中华人民共和国土地管理法实施', NULL, '国务院令第743号(2021修订)', NULL, '国务院', '2021-07-02', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('146', '', '金属材料焊缝破坏性试验熔化焊接头焊缝金属纵向拉伸试验', NULL, 'GB/T2652-2022', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2022-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('147', '', '起重机钢丝绳保养维护检验和报废', NULL, 'GB/T5972-2016', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2016-07-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('148', '', '电弧螺柱焊用圆柱头焊钉', NULL, 'GB/T10433-2002', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2002-12-05', NULL, NULL, NULL, '国家标准', NULL, NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('149', '', '高处作业吊篮', NULL, 'GB/T19155-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-12-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('15', '', '钢结构用高强度大六角头螺栓', NULL, 'GB1228-2006', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2006-03-27', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('150', '', '低合金高强度结构钢', NULL, 'GB/T1591-2018', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2018-06-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('151', '', '架桥机使用说明书', NULL, NULL, NULL, '架桥机生产企业', NULL, NULL, NULL, NULL, '技术文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('152', '', '公路水运工程淘汰危及生产安全施工工艺、设备和材料目录第一批条文说明', NULL, '交安监发〔2022〕37号', NULL, '交通运输部', '2022-04-08', NULL, NULL, NULL, '规范性文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('153', '', '钢筋机械连接技术规程', NULL, 'JGJ107-2016', NULL, '中华人民共和国住房和城乡建设部', '2016-01-14', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('154', '', '电力高处作业防坠器', NULL, 'DL/T1147-2009', NULL, '国家能源局', '2009-12-11', NULL, NULL, NULL, '电力行业标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('155', '', '电力高处作业防坠器', NULL, 'DL/T1147-2018', NULL, '国家能源局', '2018-12-10', NULL, NULL, NULL, '电力行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('156', '', '坠落防护速差自控器', NULL, 'GB24544-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-05-23', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('16', '', '架桥机安全规程', NULL, 'GB26469-2011', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2011-05-12', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('17', '', '中华人民共和国建筑法', NULL, NULL, NULL, '全国人民代表大会常务委员会', '1997-11-01', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('18', '', '钢结构用高强度垫圈', NULL, 'GB/T1230-2006', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2006-03-27', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('19', '', '坠落防护挂点装置', NULL, 'GB30862-2014', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2014-07-24', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('2', '', '公路水运工程临时用电技术规程', NULL, 'JTT1499-2024', NULL, '交通运输部', '2024-04-02', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('20', '', '起重机械安全监控管理系统', NULL, 'GB/T28264-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-12-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('21', '', '坠落防护水平生命线装置', NULL, 'GB38454-2019', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2019-12-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('22', '', '起重机手势信号', NULL, 'GB5082-2019', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2019-12-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
+INSERT INTO `t_basis_of_preparation` VALUES ('1', '', '中华人民共和国水土保持法', NULL, '主席令第39号', NULL, '第十一届全国人民代表大会常务委员会', '2010-12-25', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-1');
+INSERT INTO `t_basis_of_preparation` VALUES ('10', '', '粗直径钢丝绳', NULL, 'GB/T20067-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-09-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-2');
+INSERT INTO `t_basis_of_preparation` VALUES ('100', '', '熔化焊用钢丝', NULL, 'GB/T14957-1994', NULL, '国家技术监督局', '1994-04-04', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-3');
+INSERT INTO `t_basis_of_preparation` VALUES ('101', '', '起重机吊装工和指挥人员的培训', NULL, 'GB/T23721-2009', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2009-05-18', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-4');
+INSERT INTO `t_basis_of_preparation` VALUES ('102', '', '中华人民共和国环境保护法', NULL, '主席令第9号(2014修订)', NULL, '全国人民代表大会常务委员会', '2014-04-24', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-5');
+INSERT INTO `t_basis_of_preparation` VALUES ('103', '', '起重机设计规范', NULL, 'GB/T3811-2008', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2008-06-03', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-6');
+INSERT INTO `t_basis_of_preparation` VALUES ('104', '', '生产安全事故报告和调查处理条例', NULL, '国务院令第493号', NULL, '国务院', '2007-04-09', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-7');
+INSERT INTO `t_basis_of_preparation` VALUES ('105', '', '安全带', NULL, 'GB6095-2021', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2021-04-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-8');
+INSERT INTO `t_basis_of_preparation` VALUES ('106', '', '公路桥涵设计通用规范', NULL, 'JTGD60-2015', NULL, '交通运输部', '2015-09-23', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-9');
+INSERT INTO `t_basis_of_preparation` VALUES ('107', '', '埋弧焊用非合金钢及细晶粒钢实心焊丝、药芯焊丝和焊丝-焊剂组合', NULL, 'GB/T5293-2018', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2018-12-28', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-10');
+INSERT INTO `t_basis_of_preparation` VALUES ('108', '', '公路桥涵施工技术规范', NULL, 'JTG/T3650-2020', NULL, '中华人民共和国交通运输部', '2020-04-28', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-11');
+INSERT INTO `t_basis_of_preparation` VALUES ('109', '', '桥梁用结构钢', NULL, 'GB/T714-2015', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2015-04-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-12');
+INSERT INTO `t_basis_of_preparation` VALUES ('11', '', '非合金钢及细晶粒钢药芯焊丝', NULL, 'GB/T10045-2018', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2018-06-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-13');
+INSERT INTO `t_basis_of_preparation` VALUES ('110', '', '焊接材料质量管理规程', NULL, 'JB/T3223-2017', NULL, '工业和信息化部', '2017-11-07', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-14');
+INSERT INTO `t_basis_of_preparation` VALUES ('111', '', '建筑工程施工现场标志设置技术规程', NULL, 'JGJ348-2014', NULL, '中华人民共和国住房和城乡建设部', '2014-01-09', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-15');
+INSERT INTO `t_basis_of_preparation` VALUES ('112', '', '钢结构设计标准含条文说明', NULL, 'GB50017-2017', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2017-03-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-16');
+INSERT INTO `t_basis_of_preparation` VALUES ('113', '', '厚度方向性能钢板', NULL, 'GB/T5313-2010', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2010-12-23', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-17');
+INSERT INTO `t_basis_of_preparation` VALUES ('114', '', '钢丝绳通用技术条件', NULL, 'GB/T20118-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-07-12', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-18');
+INSERT INTO `t_basis_of_preparation` VALUES ('115', '', '公路桥涵地基与基础设计规范', NULL, 'JTG3363-2019', NULL, '交通运输部', '2019-03-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-19');
+INSERT INTO `t_basis_of_preparation` VALUES ('116', '', '公路水运工程临时用电技术规程上传系统', NULL, 'JT/T1499-2024', NULL, '交通运输部', '2024-01-04', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-20');
+INSERT INTO `t_basis_of_preparation` VALUES ('117', '', '公路路基施工技术规范', NULL, 'JTG/T3610-2019', NULL, '交通运输部', '2019-03-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-21');
+INSERT INTO `t_basis_of_preparation` VALUES ('118', '', '钢结构焊接规范', NULL, 'GB50661-2011', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2011-05-18', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-22');
+INSERT INTO `t_basis_of_preparation` VALUES ('119', '', '公路工程预制梁架设施工标准化作业手册', NULL, NULL, NULL, '交通运输部', NULL, NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-23');
+INSERT INTO `t_basis_of_preparation` VALUES ('12', '', '厚度方向性能钢板', NULL, 'GB/T5313-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-09-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-24');
+INSERT INTO `t_basis_of_preparation` VALUES ('120', '', '中华人民共和国森林法', NULL, '主席令第39号(2019修订)', NULL, '全国人民代表大会常务委员会', '2019-12-28', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-25');
+INSERT INTO `t_basis_of_preparation` VALUES ('121', '', '公路工程水泥及水泥混凝土试验规程', NULL, 'JTG3420-2020', NULL, '交通运输部', '2020-02-26', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-26');
+INSERT INTO `t_basis_of_preparation` VALUES ('122', '', '建设工程安全生产管理条例2023', NULL, '国务院令第393号(2023修订)', NULL, '国务院', '2023-07-29', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-27');
+INSERT INTO `t_basis_of_preparation` VALUES ('123', '', '钢结构工程施工质量验收标准', NULL, 'GB50205-2020', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2020-11-18', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-28');
+INSERT INTO `t_basis_of_preparation` VALUES ('124', '', '公路工程施工现场安全防护技术要求', NULL, 'JT/T1508-2024', NULL, '交通运输部', '2024-01-04', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-29');
+INSERT INTO `t_basis_of_preparation` VALUES ('125', '', '危险性较大的分部分项工程专项施工方案编制与管理指南', NULL, 'T/CECS20011-2022', NULL, '中国工程建设标准化协会', '2022-03-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-30');
+INSERT INTO `t_basis_of_preparation` VALUES ('126', '', '起重机械安全规程', NULL, 'GB6067.5-2014', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2014-07-08', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-31');
+INSERT INTO `t_basis_of_preparation` VALUES ('127', '', '建筑电气工程施工质量验收规范', NULL, 'GB50303-2015', NULL, '中华人民共和国住房和城乡建设部', '2015-09-10', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-32');
+INSERT INTO `t_basis_of_preparation` VALUES ('128', '', '建筑施工高处作业安全技术规范', NULL, 'JGJ80-2016', NULL, '中华人民共和国住房和城乡建设部', '2016-01-22', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-33');
+INSERT INTO `t_basis_of_preparation` VALUES ('129', '', '中华人民共和国安全生产法', NULL, '主席令第88号(2021修订)', NULL, '全国人民代表大会常务委员会', '2021-06-10', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-34');
+INSERT INTO `t_basis_of_preparation` VALUES ('13', '', '建筑施工起重吊装工程安全技术规范', NULL, 'JGJ276-2012', NULL, '中华人民共和国住房和城乡建设部', '2012-08-23', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-35');
+INSERT INTO `t_basis_of_preparation` VALUES ('130', '', '碳素结构钢', NULL, 'GB/T700-2006', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2006-11-01', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-36');
+INSERT INTO `t_basis_of_preparation` VALUES ('131', '', '厚钢板超声检测方法', NULL, 'GB/T2970-2016', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2016-02-24', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-37');
+INSERT INTO `t_basis_of_preparation` VALUES ('132', '', '中华人民共和国野生植物保护条例', NULL, '国务院令第204号(2024修订)', NULL, '国务院', '2024-07-20', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-38');
+INSERT INTO `t_basis_of_preparation` VALUES ('133', '', '国务院关于进一步加强企业安全生产工作的通知', NULL, '国发〔2010〕23号', NULL, '国务院', '2010-07-19', NULL, NULL, NULL, '规范性文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-39');
+INSERT INTO `t_basis_of_preparation` VALUES ('134', '', '建设工程施工现场供用电安全规范', NULL, 'GB50194-2014', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2014-12-01', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-40');
+INSERT INTO `t_basis_of_preparation` VALUES ('135', '', '中华人民共和国道路交通安全法', NULL, '主席令第47号(2021修订)', NULL, '全国人民代表大会常务委员会', '2021-04-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-41');
+INSERT INTO `t_basis_of_preparation` VALUES ('136', '', '生产经营单位生产安全事故应急预案编制导则', NULL, 'GB/T29639-2020', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2020-03-06', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-42');
+INSERT INTO `t_basis_of_preparation` VALUES ('137', '', '起重设备安装工程施工及验收规范', NULL, 'GB50278-2010', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2010-09-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-43');
+INSERT INTO `t_basis_of_preparation` VALUES ('138', '', '中华人民共和国野生动物保护法', NULL, '主席令第19号(2022修订)', NULL, '全国人民代表大会常务委员会', '2022-12-30', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-44');
+INSERT INTO `t_basis_of_preparation` VALUES ('139', '', '架桥机通用技术条件', NULL, 'GB/T26470-2011', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2011-06-16', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-45');
+INSERT INTO `t_basis_of_preparation` VALUES ('14', '', '劳动防护用品监督管理规定', NULL, NULL, NULL, '国家安全生产监督管理总局', '2005-07-22', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-46');
+INSERT INTO `t_basis_of_preparation` VALUES ('140', '', '建筑施工安全检查标准', NULL, 'JGJ59-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-12-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-47');
+INSERT INTO `t_basis_of_preparation` VALUES ('141', '', '中华人民共和国特种设备安全法', NULL, '主席令第4号(2021修订)', NULL, '全国人民代表大会常务委员会', '2013-06-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-48');
+INSERT INTO `t_basis_of_preparation` VALUES ('142', '', '四川省安全生产条例', NULL, NULL, NULL, '四川省人民代表大会常务委员会', '2021-07-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-49');
+INSERT INTO `t_basis_of_preparation` VALUES ('143', '', '市政架桥机安全使用技术规程', NULL, 'JGJ266-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-02-16', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-50');
+INSERT INTO `t_basis_of_preparation` VALUES ('144', '', '公路桥梁钢结构防腐涂装技术条件', NULL, 'JT/T722-2023', NULL, '交通运输部', '2023-03-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-51');
+INSERT INTO `t_basis_of_preparation` VALUES ('145', '', '中华人民共和国土地管理法实施条例', NULL, '国务院令第743号(2021修订)', NULL, '国务院', '2021-07-02', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-52');
+INSERT INTO `t_basis_of_preparation` VALUES ('146', '', '金属材料焊缝破坏性试验熔化焊接头焊缝金属纵向拉伸试验', NULL, 'GB/T2652-2022', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2022-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-53');
+INSERT INTO `t_basis_of_preparation` VALUES ('147', '', '起重机钢丝绳保养维护检验和报废', NULL, 'GB/T5972-2016', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2016-07-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-54');
+INSERT INTO `t_basis_of_preparation` VALUES ('148', '', '电弧螺柱焊用圆柱头焊钉', NULL, 'GB/T10433-2002', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2002-12-05', NULL, NULL, NULL, '国家标准', NULL, NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-55');
+INSERT INTO `t_basis_of_preparation` VALUES ('149', '', '高处作业吊篮', NULL, 'GB/T19155-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-12-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-56');
+INSERT INTO `t_basis_of_preparation` VALUES ('15', '', '钢结构用高强度大六角头螺栓', NULL, 'GB1228-2006', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2006-03-27', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-57');
+INSERT INTO `t_basis_of_preparation` VALUES ('150', '', '低合金高强度结构钢', NULL, 'GB/T1591-2018', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2018-06-07', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-58');
+INSERT INTO `t_basis_of_preparation` VALUES ('151', '', '架桥机使用说明书', NULL, NULL, NULL, '架桥机生产企业', NULL, NULL, NULL, NULL, '技术文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-59');
+INSERT INTO `t_basis_of_preparation` VALUES ('152', '', '公路水运工程淘汰危及生产安全施工工艺、设备和材料目录第一批条文说明', NULL, '交安监发〔2022〕37号', NULL, '交通运输部', '2022-04-08', NULL, NULL, NULL, '规范性文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-60');
+INSERT INTO `t_basis_of_preparation` VALUES ('153', '', '钢筋机械连接技术规程', NULL, 'JGJ107-2016', NULL, '中华人民共和国住房和城乡建设部', '2016-01-14', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-61');
+INSERT INTO `t_basis_of_preparation` VALUES ('154', '', '电力高处作业防坠器', NULL, 'DL/T1147-2009', NULL, '国家能源局', '2009-12-11', NULL, NULL, NULL, '电力行业标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-62');
+INSERT INTO `t_basis_of_preparation` VALUES ('155', '', '电力高处作业防坠器', NULL, 'DL/T1147-2018', NULL, '国家能源局', '2018-12-10', NULL, NULL, NULL, '电力行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-63');
+INSERT INTO `t_basis_of_preparation` VALUES ('156', '', '坠落防护速差自控器', NULL, 'GB24544-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-05-23', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-64');
+INSERT INTO `t_basis_of_preparation` VALUES ('16', '', '架桥机安全规程', NULL, 'GB26469-2011', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2011-05-12', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-65');
+INSERT INTO `t_basis_of_preparation` VALUES ('17', '', '中华人民共和国建筑法', NULL, NULL, NULL, '全国人民代表大会常务委员会', '1997-11-01', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-66');
+INSERT INTO `t_basis_of_preparation` VALUES ('18', '', '钢结构用高强度垫圈', NULL, 'GB/T1230-2006', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2006-03-27', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-67');
+INSERT INTO `t_basis_of_preparation` VALUES ('19', '', '坠落防护挂点装置', NULL, 'GB30862-2014', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2014-07-24', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-68');
+INSERT INTO `t_basis_of_preparation` VALUES ('2', '', '公路水运工程临时用电技术规程', NULL, 'JTT1499-2024', NULL, '交通运输部', '2024-04-02', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-69');
+INSERT INTO `t_basis_of_preparation` VALUES ('20', '', '起重机械安全监控管理系统', NULL, 'GB/T28264-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-12-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-70');
+INSERT INTO `t_basis_of_preparation` VALUES ('21', '', '坠落防护水平生命线装置', NULL, 'GB38454-2019', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2019-12-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-71');
+INSERT INTO `t_basis_of_preparation` VALUES ('22', '', '起重机手势信号', NULL, 'GB5082-2019', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2019-12-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-72');
 INSERT INTO `t_basis_of_preparation` VALUES ('23', '', '江苏省高速公路施工标准化指南', NULL, 'DB32/T', NULL, '江苏省交通运输厅', '2010-01-01', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
 INSERT INTO `t_basis_of_preparation` VALUES ('24', '', '混凝土质量控制标准', NULL, 'GB50164-2011', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2011-07-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
 INSERT INTO `t_basis_of_preparation` VALUES ('25', '', '建设工程质量管理条例', NULL, '国务院令第279号', NULL, '国务院', '2000-01-30', NULL, NULL, NULL, '法律法规', '法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
@@ -144,58 +147,72 @@ INSERT INTO `t_basis_of_preparation` VALUES ('36', '', '公路钢筋混凝土及
 INSERT INTO `t_basis_of_preparation` VALUES ('37', '', '建筑施工临时支撑结构技术规范', NULL, 'JGJ300-2013', NULL, '中华人民共和国住房和城乡建设部', '2013-05-23', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
 INSERT INTO `t_basis_of_preparation` VALUES ('38', '', '起重机械超载保护装置', NULL, 'GB12602-2009', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2009-12-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
 INSERT INTO `t_basis_of_preparation` VALUES ('39', '', '公路桥梁抗震设计规范', NULL, 'JTGT2231-01-2020', NULL, '交通运输部', '2020-09-29', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('4', '', '起重机械安全规程', NULL, 'GB6067-2010', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2010-09-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('40', '', '公路工程施工安全监测与预警系统技术要求', NULL, 'JT/T1498-2024', NULL, '交通运输部', NULL, NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('41', '', '中华人民共和国合同法', NULL, NULL, NULL, '全国人民代表大会常务委员会', '1999-03-15', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('42', '', '公路工程施工安全技术规范', NULL, 'JTGF90-2015', NULL, '交通运输部', '2015-03-31', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('43', '', '生产安全事故应急预案管理办法', NULL, NULL, NULL, '国家安全生产监督管理总局', '2016-06-03', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('44', '', '中华人民共和国水法', NULL, '主席令第48号', NULL, '全国人民代表大会常务委员会', '2016-07-02', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('45', '', '建筑结构荷载规范', NULL, 'GB50009-2012', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2012-05-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('46', '', '电气装置安装工程起重机电气装置施工及验收规范', NULL, 'GB50256-2014', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2014-12-02', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('47', '', '中华人民共和国安全生产法2021', NULL, '主席令第88号', NULL, '全国人民代表大会常务委员会', '2021-06-10', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('48', '', '工程测量规范', NULL, 'GB50026-2007', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2007-03-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('49', '', '焊接接头冲击试验方法', NULL, 'GB2650-89', NULL, '国家技术监督局', '1989-03-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('5', '', '交通运输突发事件信息报告和处理办法', NULL, '交通运输部令2010年第8号', NULL, '交通运输部', '2010-02-05', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('50', '', '金属材料焊缝破坏性试验冲击试验', NULL, 'GB/T2650-2022', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2022-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('51', '', '钢的低倍组织及缺陷酸蚀检验法', NULL, 'GBT226-2015', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2015-12-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('52', '', '建筑施工扣件式钢管脚手架安全技术规范', NULL, 'JGJ130-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-01-28', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('53', '', '建筑工程绿色施工规范', NULL, 'GB/T50905-2014', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2014-04-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('54', '', '起重机试验规范和程序', NULL, 'GB/T5905-2011', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2011-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('55', '', '中华人民共和国特种设备安全法', NULL, '主席令第4号', NULL, '全国人民代表大会常务委员会', '2013-06-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('56', '', '金属材料焊缝破坏性试验横向拉伸试验', NULL, 'GB/T2651-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-05-23', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('57', '', '焊接接头硬度试验方法', NULL, 'GBT2654-2008', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2008-03-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('58', '', '关于打造公路水运品质工程的指导意见', NULL, '交安监发〔2016〕216号', NULL, '交通运输部', '2016-12-12', NULL, NULL, NULL, '规范性文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('59', '', '架桥机通用技术条件', NULL, 'JT/T1130-2017', NULL, '交通运输部', '2017-04-12', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('6', '', '焊缝无损检测超声检测技术、检测等级和评定', NULL, 'GB/T11345-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-11-27', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('60', '', '公路水运工程安全生产监督管理办法', NULL, '交通运输部令2023年第2号', NULL, '交通运输部', '2023-01-25', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('61', '', '中华人民共和国环境保护税法实施条例', NULL, '国务院令第693号', NULL, '国务院', '2017-12-25', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('62', '', '建筑施工手册', NULL, NULL, NULL, '相关建筑工程类出版社', NULL, NULL, NULL, NULL, '产品说明书', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('63', '', '国家突发公共事件总体应急预案', NULL, '国发〔2006〕11号', NULL, '国务院', '2006-01-08', NULL, NULL, NULL, NULL, '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('64', '', '起重机用钢丝绳', NULL, 'GB/T34198-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-09-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('65', '', '建设工程安全生产管理条列', NULL, '国务院令第393号', NULL, '国务院', '2003-11-24', NULL, NULL, NULL, '管理办法', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('66', '', '公路工程施工安全标志设置规范', NULL, 'JT/T1507-2024', NULL, '交通运输部', '2024-01-04', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('67', '', '施工现场机械设备检查技术规范', NULL, 'JGJ160-2016', NULL, '中华人民共和国住房和城乡建设部', '2016-01-22', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('68', '', '公路桥梁钢结构防腐涂装技术条件', NULL, 'JT/T722-2023', NULL, '交通运输部', '2023-03-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('69', '', '企业安全生产应急管理九条规定', NULL, '安监总办〔2015〕14号', NULL, '原国家安全生产监督管理总局', '2015-02-28', NULL, NULL, NULL, '法律法规', '规范性文件', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('7', '', '铸钢丸', NULL, 'YBT5149-93', NULL, '冶金工业部', '1993-01-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('70', '', '施工现场临时用电安全技术规范', NULL, 'JGJ46-2005', NULL, '中华人民共和国住房和城乡建设部', '2005-01-13', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('71', '', '建筑施工企业安全生产管理规范', NULL, 'GB50656-2011', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2011-07-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('72', '', '路桥施工计算手册', NULL, NULL, NULL, '相关工程类出版社', NULL, NULL, NULL, NULL, '产品说明书', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('73', '', '建筑施工起重吊装安全技术规范', NULL, 'JGJ276-2012', NULL, '中华人民共和国住房和城乡建设部', '2012-03-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('74', '', '建设工程施工现场环境与卫生标准', NULL, 'JGJ146-2013', NULL, '中华人民共和国住房和城乡建设部', '2013-05-13', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('75', '', '非合金钢及细晶粒钢焊条', NULL, 'GB/T5117-2012', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2012-11-05', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('76', '', '公路交通安全设施设计细则', NULL, 'JTG/TD81-2017', NULL, '交通运输部', '2017-06-02', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('77', '', '重要用途钢丝绳', NULL, 'GB8918-2006', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2006-08-01', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('78', '', '钢筋焊接及验收规程', NULL, 'JGJ18-2012', NULL, '中华人民共和国住房和城乡建设部', '2012-03-06', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('79', '', '危险性较大的分部分项工程安全管理规定', NULL, '住建部令第37号', NULL, '中华人民共和国住房和城乡建设部', '2018-03-08', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('8', '', '安全网', NULL, 'GB5725-2009', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2009-04-24', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('80', '', '热轧钢板和钢带的尺寸、外形、重量及允许偏差', NULL, 'GB/T709-2019', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2019-12-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('81', '', '钢筋焊接接头试验方法标准', NULL, 'JGJ/T27-2014', NULL, '中华人民共和国住房和城乡建设部', '2014-01-10', NULL, NULL, NULL, '法律法规', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('82', '', '公路工程质量检验评定标准', NULL, 'JTGF80/1-2017', NULL, '交通运输部', '2017-09-26', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('83', '', '住房城乡建设部办公厅关于实施危险性较大的分部分项工程安全管理规定有关问题的通知', NULL, '建办质〔2018〕31号', NULL, '住房和城乡建设部办公厅', '2018-05-17', NULL, NULL, NULL, '法律法规', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('84', '', '钢结构设计规范', NULL, 'GB/T22395-2022', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2022-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('85', '', '坠落防护安全带', NULL, 'GB6095-2021', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2021-04-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
-INSERT INTO `t_basis_of_preparation` VALUES ('86', '', '中华人民共和国突发事件应对法', NULL, '主席令第69号(2019修订)', NULL, '全国人民代表大会常务委员会', '2019-12-28', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
+INSERT INTO `t_basis_of_preparation` VALUES ('4', '', '起重机械安全规程', NULL, 'GB6067-2010', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2010-09-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-57');
+INSERT INTO `t_basis_of_preparation` VALUES ('40', '', '公路工程施工安全监测与预警系统技术要求', NULL, 'JT/T1498-2024', NULL, '交通运输部', NULL, NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-58');
+INSERT INTO `t_basis_of_preparation` VALUES ('41', '', '中华人民共和国合同法', NULL, NULL, NULL, '全国人民代表大会常务委员会', '1999-03-15', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-59');
+INSERT INTO `t_basis_of_preparation` VALUES ('42', '', '公路工程施工安全技术规范', NULL, 'JTGF90-2015', NULL, '交通运输部', '2015-03-31', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-60');
+INSERT INTO `t_basis_of_preparation` VALUES ('43', '', '生产安全事故应急预案管理办法', NULL, NULL, NULL, '国家安全生产监督管理总局', '2016-06-03', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-61');
+INSERT INTO `t_basis_of_preparation` VALUES ('44', '', '中华人民共和国水法', NULL, '主席令第48号', NULL, '全国人民代表大会常务委员会', '2016-07-02', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-62');
+INSERT INTO `t_basis_of_preparation` VALUES ('45', '', '建筑结构荷载规范', NULL, 'GB50009-2012', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2012-05-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-63');
+INSERT INTO `t_basis_of_preparation` VALUES ('46', '', '电气装置安装工程起重机电气装置施工及验收规范', NULL, 'GB50256-2014', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2014-12-02', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-64');
+INSERT INTO `t_basis_of_preparation` VALUES ('47', '', '中华人民共和国安全生产法2021', NULL, '主席令第88号', NULL, '全国人民代表大会常务委员会', '2021-06-10', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-65');
+INSERT INTO `t_basis_of_preparation` VALUES ('48', '', '工程测量规范', NULL, 'GB50026-2007', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2007-03-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-66');
+INSERT INTO `t_basis_of_preparation` VALUES ('49', '', '焊接接头冲击试验方法', NULL, 'GB2650-89', NULL, '国家技术监督局', '1989-03-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-67');
+INSERT INTO `t_basis_of_preparation` VALUES ('5', '', '交通运输突发事件信息报告和处理办法', NULL, '交通运输部令2010年第8号', NULL, '交通运输部', '2010-02-05', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-68');
+INSERT INTO `t_basis_of_preparation` VALUES ('50', '', '金属材料焊缝破坏性试验冲击试验', NULL, 'GB/T2650-2022', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2022-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-69');
+INSERT INTO `t_basis_of_preparation` VALUES ('51', '', '钢的低倍组织及缺陷酸蚀检验法', NULL, 'GBT226-2015', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2015-12-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-70');
+INSERT INTO `t_basis_of_preparation` VALUES ('52', '', '建筑施工扣件式钢管脚手架安全技术规范', NULL, 'JGJ130-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-01-28', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-71');
+INSERT INTO `t_basis_of_preparation` VALUES ('53', '', '建筑工程绿色施工规范', NULL, 'GB/T50905-2014', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2014-04-15', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-72');
+INSERT INTO `t_basis_of_preparation` VALUES ('54', '', '起重机试验规范和程序', NULL, 'GB/T5905-2011', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2011-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-73');
+INSERT INTO `t_basis_of_preparation` VALUES ('55', '', '中华人民共和国特种设备安全法', NULL, '主席令第4号', NULL, '全国人民代表大会常务委员会', '2013-06-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-74');
+INSERT INTO `t_basis_of_preparation` VALUES ('56', '', '金属材料焊缝破坏性试验横向拉伸试验', NULL, 'GB/T2651-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-05-23', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-75');
+INSERT INTO `t_basis_of_preparation` VALUES ('57', '', '焊接接头硬度试验方法', NULL, 'GBT2654-2008', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2008-03-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-76');
+INSERT INTO `t_basis_of_preparation` VALUES ('58', '', '关于打造公路水运品质工程的指导意见', NULL, '交安监发〔2016〕216号', NULL, '交通运输部', '2016-12-12', NULL, NULL, NULL, '规范性文件', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-77');
+INSERT INTO `t_basis_of_preparation` VALUES ('59', '', '架桥机通用技术条件', NULL, 'JT/T1130-2017', NULL, '交通运输部', '2017-04-12', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-78');
+INSERT INTO `t_basis_of_preparation` VALUES ('6', '', '焊缝无损检测超声检测技术、检测等级和评定', NULL, 'GB/T11345-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-11-27', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-79');
+INSERT INTO `t_basis_of_preparation` VALUES ('60', '', '公路水运工程安全生产监督管理办法', NULL, '交通运输部令2023年第2号', NULL, '交通运输部', '2023-01-25', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-80');
+INSERT INTO `t_basis_of_preparation` VALUES ('61', '', '中华人民共和国环境保护税法实施条例', NULL, '国务院令第693号', NULL, '国务院', '2017-12-25', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-81');
+INSERT INTO `t_basis_of_preparation` VALUES ('62', '', '建筑施工手册', NULL, NULL, NULL, '相关建筑工程类出版社', NULL, NULL, NULL, NULL, '产品说明书', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-82');
+INSERT INTO `t_basis_of_preparation` VALUES ('63', '', '国家突发公共事件总体应急预案', NULL, '国发〔2006〕11号', NULL, '国务院', '2006-01-08', NULL, NULL, NULL, NULL, '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-83');
+INSERT INTO `t_basis_of_preparation` VALUES ('64', '', '起重机用钢丝绳', NULL, 'GB/T34198-2017', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2017-09-29', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-84');
+INSERT INTO `t_basis_of_preparation` VALUES ('65', '', '建设工程安全生产管理条例', NULL, '国务院令第393号', NULL, '国务院', '2003-11-24', NULL, NULL, NULL, '管理办法', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-85');
+INSERT INTO `t_basis_of_preparation` VALUES ('66', '', '公路工程施工安全标志设置规范', NULL, 'JT/T1507-2024', NULL, '交通运输部', '2024-01-04', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-86');
+INSERT INTO `t_basis_of_preparation` VALUES ('67', '', '施工现场机械设备检查技术规范', NULL, 'JGJ160-2016', NULL, '中华人民共和国住房和城乡建设部', '2016-01-22', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-87');
+INSERT INTO `t_basis_of_preparation` VALUES ('68', '', '公路桥梁钢结构防腐涂装技术条件', NULL, 'JT/T722-2023', NULL, '交通运输部', '2023-03-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-88');
+INSERT INTO `t_basis_of_preparation` VALUES ('69', '', '企业安全生产应急管理九条规定', NULL, '安监总办〔2015〕14号', NULL, '原国家安全生产监督管理总局', '2015-02-28', NULL, NULL, NULL, '法律法规', '规范性文件', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-89');
+INSERT INTO `t_basis_of_preparation` VALUES ('7', '', '铸钢丸', NULL, 'YBT5149-93', NULL, '冶金工业部', '1993-01-01', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-90');
+INSERT INTO `t_basis_of_preparation` VALUES ('70', '', '施工现场临时用电安全技术规范', NULL, 'JGJ46-2005', NULL, '中华人民共和国住房和城乡建设部', '2005-01-13', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-91');
+INSERT INTO `t_basis_of_preparation` VALUES ('71', '', '建筑施工企业安全生产管理规范', NULL, 'GB50656-2011', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2011-07-26', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-92');
+INSERT INTO `t_basis_of_preparation` VALUES ('72', '', '路桥施工计算手册', NULL, NULL, NULL, '相关工程类出版社', NULL, NULL, NULL, NULL, '产品说明书', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-93');
+INSERT INTO `t_basis_of_preparation` VALUES ('73', '', '建筑施工起重吊装安全技术规范', NULL, 'JGJ276-2012', NULL, '中华人民共和国住房和城乡建设部', '2012-03-15', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-94');
+INSERT INTO `t_basis_of_preparation` VALUES ('74', '', '建设工程施工现场环境与卫生标准', NULL, 'JGJ146-2013', NULL, '中华人民共和国住房和城乡建设部', '2013-05-13', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-95');
+INSERT INTO `t_basis_of_preparation` VALUES ('75', '', '非合金钢及细晶粒钢焊条', NULL, 'GB/T5117-2012', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2012-11-05', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-96');
+INSERT INTO `t_basis_of_preparation` VALUES ('76', '', '公路交通安全设施设计细则', NULL, 'JTG/TD81-2017', NULL, '交通运输部', '2017-06-02', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-97');
+INSERT INTO `t_basis_of_preparation` VALUES ('77', '', '重要用途钢丝绳', NULL, 'GB8918-2006', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2006-08-01', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-98');
+INSERT INTO `t_basis_of_preparation` VALUES ('78', '', '钢筋焊接及验收规程', NULL, 'JGJ18-2012', NULL, '中华人民共和国住房和城乡建设部', '2012-03-06', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-99');
+INSERT INTO `t_basis_of_preparation` VALUES ('79', '', '危险性较大的分部分项工程安全管理规定', NULL, '住建部令第37号', NULL, '中华人民共和国住房和城乡建设部', '2018-03-08', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-100');
+INSERT INTO `t_basis_of_preparation` VALUES ('8', '', '安全网', NULL, 'GB5725-2009', NULL, '国家质量监督检验检疫总局、国家标准化管理委员会', '2009-04-24', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-101');
+INSERT INTO `t_basis_of_preparation` VALUES ('80', '', '热轧钢板和钢带的尺寸、外形、重量及允许偏差', NULL, 'GB/T709-2019', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2019-12-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-102');
+INSERT INTO `t_basis_of_preparation` VALUES ('81', '', '钢筋焊接接头试验方法标准', NULL, 'JGJ/T27-2014', NULL, '中华人民共和国住房和城乡建设部', '2014-01-10', NULL, NULL, NULL, '法律法规', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-103');
+INSERT INTO `t_basis_of_preparation` VALUES ('82', '', '公路工程质量检验评定标准', NULL, 'JTGF80/1-2017', NULL, '交通运输部', '2017-09-26', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-104');
+INSERT INTO `t_basis_of_preparation` VALUES ('83', '', '住房城乡建设部办公厅关于实施危险性较大的分部分项工程安全管理规定有关问题的通知', NULL, '建办质〔2018〕31号', NULL, '住房和城乡建设部办公厅', '2018-05-17', NULL, NULL, NULL, '法律法规', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-105');
+INSERT INTO `t_basis_of_preparation` VALUES ('84', '', '钢结构设计规范', NULL, 'GB/T22395-2022', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2022-12-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-106');
+INSERT INTO `t_basis_of_preparation` VALUES ('85', '', '坠落防护安全带', NULL, 'GB6095-2021', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2021-04-30', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-107');
+INSERT INTO `t_basis_of_preparation` VALUES ('86', '', '中华人民共和国突发事件应对法', NULL, '主席令第69号(2019修订)', NULL, '全国人民代表大会常务委员会', '2019-12-28', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-108');
+INSERT INTO `t_basis_of_preparation` VALUES ('87', '', '四川省危险性较大的分部分项工程安全管理规定实施细则', NULL, NULL, NULL, '四川省住房和城乡建设厅', NULL, NULL, NULL, NULL, '法律法规', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-109');
+INSERT INTO `t_basis_of_preparation` VALUES ('88', '', '公路桥梁支座检测技术规程', NULL, 'JTG/TJ57-2020', NULL, '交通运输部', '2020-04-28', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-110');
+INSERT INTO `t_basis_of_preparation` VALUES ('89', '', '钢结构高强度螺栓连接技术规程', NULL, 'JGJ82-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-01-07', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-111');
+INSERT INTO `t_basis_of_preparation` VALUES ('9', '', '移动式起重机吊装作业安全管理规范', NULL, 'Q/SY1248-2009', NULL, '中国石油天然气集团公司', '2009-01-01', NULL, NULL, NULL, '企业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-112');
+INSERT INTO `t_basis_of_preparation` VALUES ('90', '', '中华人民共和国固体废物污染环境防治法', NULL, '主席令第43号(2020修订)', NULL, '全国人民代表大会常务委员会', '2020-04-29', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-113');
+INSERT INTO `t_basis_of_preparation` VALUES ('91', '', '建筑施工安全技术统一规范', NULL, 'GB50870-2013', NULL, '中华人民共和国住房和城乡建设部、国家质量监督检验检疫总局', '2013-05-10', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-114');
+INSERT INTO `t_basis_of_preparation` VALUES ('92', '', '焊接接头弯曲试验方法', NULL, 'GB/T2653-2008', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2008-03-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-115');
+INSERT INTO `t_basis_of_preparation` VALUES ('93', '', 'jb8716-1998汽车起重机和轮胎起重机安全规程', NULL, 'JB8716-1998', NULL, '原国家机械工业局', '1998-11-16', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '废止', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-116');
+INSERT INTO `t_basis_of_preparation` VALUES ('94', '', '中华人民共和国环境噪声污染防治法', NULL, '主席令第77号(2021修订)', NULL, '全国人民代表大会常务委员会', '2021-12-24', NULL, NULL, NULL, '法律法规', '法律', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-117');
+INSERT INTO `t_basis_of_preparation` VALUES ('95', '', '公路工程技术标准', NULL, 'JTGB01-2014', NULL, '交通运输部', '2014-09-30', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-118');
+INSERT INTO `t_basis_of_preparation` VALUES ('96', '', '特种设备安全监督检查办法', NULL, '市场监管总局令第57号', NULL, '国家市场监督管理总局', '2022-06-23', NULL, NULL, NULL, '法律法规', '部门规章', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-119');
+INSERT INTO `t_basis_of_preparation` VALUES ('97', '', '熔化极气体保护电弧焊用非合金钢及细晶粒钢实心焊丝', NULL, 'GB/T8110-2020', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2020-03-31', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-120');
+INSERT INTO `t_basis_of_preparation` VALUES ('98', '', '特种设备安全监察条例', NULL, '国务院令第549号(2009修订)', NULL, '国务院', '2009-01-24', NULL, NULL, NULL, '法律法规', '行政法规', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-121');
+INSERT INTO `t_basis_of_preparation` VALUES ('99', '', '起重机钢丝绳保养、维护、检验和报废', NULL, 'GB/T5972-2023', NULL, '国家市场监督管理总局、国家标准化管理委员会', '2023-03-17', NULL, NULL, NULL, '国家标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL, 'doc-sample-basis-122');
 INSERT INTO `t_basis_of_preparation` VALUES ('87', '', '四川省危险性较大的分部分项工程安全管理规定实施细则', NULL, NULL, NULL, '四川省住房和城乡建设厅', NULL, NULL, NULL, NULL, '法律法规', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
 INSERT INTO `t_basis_of_preparation` VALUES ('88', '', '公路桥梁支座检测技术规程', NULL, 'JTG/TJ57-2020', NULL, '交通运输部', '2020-04-28', NULL, NULL, NULL, '技术规范', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);
 INSERT INTO `t_basis_of_preparation` VALUES ('89', '', '钢结构高强度螺栓连接技术规程', NULL, 'JGJ82-2011', NULL, '中华人民共和国住房和城乡建设部', '2011-01-07', NULL, NULL, NULL, '行业标准', '参考规范', NULL, NULL, NULL, NULL, '现行', '2025-12-15 11:30:46', '2025-12-15 11:30:46', NULL);

+ 3 - 6
scripts/lq_db_local.sql

@@ -846,12 +846,10 @@ CREATE TABLE `t_basis_of_preparation`  (
   `status` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '状态:current-现行,作废-void',
   `created_at` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '记录创建时间',
   `updated_at` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '记录更新时间',
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '关联文档ID',
   PRIMARY KEY (`id`) USING BTREE,
-  INDEX `idx_standard_no`(`standard_no`) USING BTREE COMMENT '标准编号索引',
-  INDEX `idx_chinese_name`(`chinese_name`(100)) USING BTREE COMMENT '中文名称索引',
-  INDEX `idx_release_date`(`release_date`) USING BTREE COMMENT '发布日期索引',
-  INDEX `idx_document_type`(`document_type`) USING BTREE COMMENT '标准类型索引',
-  INDEX `idx_professional_field`(`professional_field`) USING BTREE COMMENT '专业领域索引'
+  INDEX `idx_basis_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_basis_document` FOREIGN KEY (`document_id`) REFERENCES `t_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB AUTO_INCREMENT = 22 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci COMMENT = '编制依据基本信息表' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
@@ -902,7 +900,6 @@ CREATE TABLE `t_document_main`  (
   `conversion_error` text CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL,
   `whether_to_enter` tinyint(0) NULL DEFAULT 0,
   `source_type` enum('basis','work','job') CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL,
-  `source_id` char(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL,
   `file_url` text CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL,
   `file_extension` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
   `content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL,

+ 13 - 132
scripts/lq_oauth_db.sql

@@ -871,7 +871,10 @@ CREATE TABLE `t_basis_of_preparation`  (
   `updated_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间,默认当前时间',
   `file_url` varchar(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
   `content` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL,
-  PRIMARY KEY (`id`) USING BTREE
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL COMMENT '关联文档ID',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `idx_basis_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_basis_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '编制依据' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
@@ -887,7 +890,6 @@ DROP TABLE IF EXISTS `t_samp_document_main`;
 CREATE TABLE `t_samp_document_main`  (
   `id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '主键',
   `source_type` enum('basis','work','job') CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '所属类型',
-  `source_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '所属ID',
   `title` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '文档名称',
   `conversion_status` int(0) NOT NULL DEFAULT 0 COMMENT '状态: 0-待转换, 1-转换中, 2-完成, 3-失败',
   `whether_to_enter` int(0) NOT NULL DEFAULT 0 COMMENT '是否入库: 0-未入库, 1-已入库',
@@ -900,8 +902,7 @@ CREATE TABLE `t_samp_document_main`  (
   `created_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间',
   `updated_by` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '修改人',
   `updated_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间',
-  PRIMARY KEY (`id`) USING BTREE,
-  UNIQUE INDEX `idx_source_id_type`(`source_id`, `source_type`) USING BTREE
+  PRIMARY KEY (`id`) USING BTREE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '文档记录表' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
@@ -926,7 +927,10 @@ CREATE TABLE `t_job_of_preparation`  (
   `updated_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间,默认 CURRENT_TIMESTAMP,更新时自动刷新',
   `file_url` varchar(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
   `content` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL,
-  PRIMARY KEY (`id`) USING BTREE
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL COMMENT '关联文档ID',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `idx_job_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_job_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '办公制度' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
@@ -961,7 +965,10 @@ CREATE TABLE `t_work_of_preparation`  (
   `updated_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间,默认 CURRENT_TIMESTAMP,更新时自动刷新',
   `file_url` varchar(500) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL,
   `content` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL,
-  PRIMARY KEY (`id`) USING BTREE
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NULL DEFAULT NULL COMMENT '关联文档ID',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `idx_work_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_work_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '施工方案\r\n' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
@@ -1112,130 +1119,4 @@ INSERT INTO `users` VALUES ('test', 'test@163.com', '', 'sha256$20b8cc7a83116a7b
 INSERT INTO `users` VALUES ('zhangsan', 'zhangsan@qq.com', '143454545', 'sha256$260c033ace1b0dc1bf21caa8b986d69e$cdc2342aafd284f0d6c8df00e908d7b7b47a59ff022aa2968aaa435941e00086', NULL, 1, 0, NULL, NULL, NULL, NULL, 'd3e906de-b9a6-4a78-a643-233252252776', '2025-12-28 21:45:15', '2025-12-28 21:45:15', NULL);
 INSERT INTO `users` VALUES ('admin', 'admin@example.com', NULL, 'sha256$fc7bcee8f0dd0566e809d1920b3524c7$149986dbf144e9aebc6a282959075db9a822012ab76813fb6b71509254b3c7ee', NULL, 1, 1, NULL, NULL, 0, NULL, 'ed6a79d3-0083-4d81-8b48-fc522f686f74', '2025-12-20 13:53:13', '2025-12-28 18:05:37', 0);
 
--- ----------------------------
--- Triggers structure for table t_samp_standard_base_info
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_basis_after_delete`;
-delimiter ;;
-CREATE TRIGGER `trg_basis_after_delete` AFTER DELETE ON `t_samp_standard_base_info` FOR EACH ROW BEGIN
-                    DELETE FROM t_samp_document_main WHERE source_id = OLD.id AND source_type = 'basis';
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_standard_base_info
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_basis_after_insert`;
-delimiter ;;
-CREATE TRIGGER `trg_basis_after_insert` AFTER INSERT ON `t_samp_standard_base_info` FOR EACH ROW BEGIN
-                    INSERT INTO t_samp_document_main (
-                        id, title, file_url, created_by, created_time, updated_by, updated_time, source_type, source_id, whether_to_enter
-                    ) VALUES (
-                        UUID(), NEW.chinese_name, NEW.file_url, NEW.created_by, NEW.created_time, IFNULL(NEW.updated_by, NEW.created_by), NEW.updated_time, 'basis', NEW.id, 0
-                    );
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_standard_base_info
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_basis_after_update`;
-delimiter ;;
-CREATE TRIGGER `trg_basis_after_update` AFTER UPDATE ON `t_samp_standard_base_info` FOR EACH ROW BEGIN
-                    UPDATE t_samp_document_main SET
-                        title = NEW.chinese_name,
-                        file_url = NEW.file_url,
-                        updated_by = IFNULL(NEW.updated_by, NEW.created_by),
-                        updated_time = NEW.updated_time
-                    WHERE source_id = NEW.id AND source_type = 'basis';
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_office_regulations
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_job_after_delete`;
-delimiter ;;
-CREATE TRIGGER `trg_job_after_delete` AFTER DELETE ON `t_samp_office_regulations` FOR EACH ROW BEGIN
-                    DELETE FROM t_samp_document_main WHERE source_id = OLD.id AND source_type = 'job';
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_office_regulations
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_job_after_insert`;
-delimiter ;;
-CREATE TRIGGER `trg_job_after_insert` AFTER INSERT ON `t_samp_office_regulations` FOR EACH ROW BEGIN
-                    INSERT INTO t_samp_document_main (
-                        id, title, file_url, created_by, created_time, updated_by, updated_time, source_type, source_id, whether_to_enter
-                    ) VALUES (
-                        UUID(), NEW.file_name, NEW.file_url, NEW.created_by, NEW.created_time, IFNULL(NEW.updated_by, NEW.created_by), NEW.updated_time, 'job', NEW.id, 0
-                    );
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_office_regulations
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_job_after_update`;
-delimiter ;;
-CREATE TRIGGER `trg_job_after_update` AFTER UPDATE ON `t_samp_office_regulations` FOR EACH ROW BEGIN
-                    UPDATE t_samp_document_main SET
-                        title = NEW.file_name,
-                        file_url = NEW.file_url,
-                        updated_by = IFNULL(NEW.updated_by, NEW.created_by),
-                        updated_time = NEW.updated_time
-                    WHERE source_id = NEW.id AND source_type = 'job';
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_construction_plan_base_info
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_work_after_delete`;
-delimiter ;;
-CREATE TRIGGER `trg_work_after_delete` AFTER DELETE ON `t_samp_construction_plan_base_info` FOR EACH ROW BEGIN
-                    DELETE FROM t_samp_document_main WHERE source_id = OLD.id AND source_type = 'work';
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_construction_plan_base_info
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_work_after_insert`;
-delimiter ;;
-CREATE TRIGGER `trg_work_after_insert` AFTER INSERT ON `t_samp_construction_plan_base_info` FOR EACH ROW BEGIN
-                    INSERT INTO t_samp_document_main (
-                        id, title, file_url, created_by, created_time, updated_by, updated_time, source_type, source_id, whether_to_enter
-                    ) VALUES (
-                        UUID(), NEW.plan_name, NEW.file_url, NEW.created_by, NEW.created_time, IFNULL(NEW.updated_by, NEW.created_by), NEW.updated_time, 'work', NEW.id, 0
-                    );
-                END
-;;
-delimiter ;
-
--- ----------------------------
--- Triggers structure for table t_samp_construction_plan_base_info
--- ----------------------------
-DROP TRIGGER IF EXISTS `trg_work_after_update`;
-delimiter ;;
-CREATE TRIGGER `trg_work_after_update` AFTER UPDATE ON `t_samp_construction_plan_base_info` FOR EACH ROW BEGIN
-                    UPDATE t_samp_document_main SET
-                        title = NEW.plan_name,
-                        file_url = NEW.file_url,
-                        updated_by = IFNULL(NEW.updated_by, NEW.created_by),
-                        updated_time = NEW.updated_time
-                    WHERE source_id = NEW.id AND source_type = 'work';
-                END
-;;
-delimiter ;
-
 SET FOREIGN_KEY_CHECKS = 1;

+ 23 - 16
scripts/lq_oauth_db_20260123.sql

@@ -272,13 +272,16 @@ CREATE TABLE `t_samp_construction_plan_base_info`  (
   `created_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间,默认当前时间',
   `updated_by` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT '修改人',
   `updated_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间,默认当前时间',
-  PRIMARY KEY (`id`) USING BTREE
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '关联文档ID',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `idx_plan_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_plan_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '施工方案知识库' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
 -- Records of t_samp_construction_plan_base_info
 -- ----------------------------
-INSERT INTO `t_samp_construction_plan_base_info` VALUES ('d4ef7acf-c28c-45cb-9214-0ddce85cf575', '方案', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:47:47', 'system', '2026-01-15 09:47:47');
+INSERT INTO `t_samp_construction_plan_base_info` VALUES ('d4ef7acf-c28c-45cb-9214-0ddce85cf575', '方案', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:47:47', 'system', '2026-01-15 09:47:47', 'doc-sample-1');
 
 -- ----------------------------
 -- Table structure for t_samp_doc_category
@@ -320,7 +323,6 @@ DROP TABLE IF EXISTS `t_samp_document_main`;
 CREATE TABLE `t_samp_document_main`  (
   `id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '主键',
   `source_type` enum('basis','work','job') CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '所属类型',
-  `source_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '所属ID',
   `title` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '文档名称',
   `conversion_status` int(0) NOT NULL DEFAULT 0 COMMENT '状态: 0-待转换, 1-转换中, 2-完成, 3-失败',
   `whether_to_enter` int(0) NOT NULL DEFAULT 0 COMMENT '是否入库: 0-未入库, 1-已入库',
@@ -333,8 +335,7 @@ CREATE TABLE `t_samp_document_main`  (
   `created_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间',
   `updated_by` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '修改人',
   `updated_time` datetime(0) NOT NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间',
-  PRIMARY KEY (`id`) USING BTREE,
-  UNIQUE INDEX `idx_source_id_type`(`source_id`, `source_type`) USING BTREE
+  PRIMARY KEY (`id`) USING BTREE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '文档记录表' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
@@ -423,13 +424,16 @@ CREATE TABLE `t_samp_office_regulations`  (
   `created_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间,默认当前时间',
   `updated_by` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT '修改人',
   `updated_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间,默认当前时间',
-  PRIMARY KEY (`id`) USING BTREE
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '关联文档ID',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `idx_office_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_office_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '办公制度知识库' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
 -- Records of t_samp_office_regulations
 -- ----------------------------
-INSERT INTO `t_samp_office_regulations` VALUES ('351b063f-eb20-4cbd-abf6-7a1097adbebf', '办公', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:48:00', 'system', '2026-01-22 15:56:00');
+INSERT INTO `t_samp_office_regulations` VALUES ('351b063f-eb20-4cbd-abf6-7a1097adbebf', '办公', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:48:00', 'system', '2026-01-22 15:56:00', 'doc-sample-2');
 
 -- ----------------------------
 -- Table structure for t_samp_search_engine
@@ -486,20 +490,23 @@ CREATE TABLE `t_samp_standard_base_info`  (
   `created_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) COMMENT '创建时间,默认当前时间',
   `updated_by` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NULL DEFAULT NULL COMMENT '修改人',
   `updated_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP(0) ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间,默认当前时间',
-  PRIMARY KEY (`id`) USING BTREE
+  `document_id` varchar(36) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '关联文档ID',
+  PRIMARY KEY (`id`) USING BTREE,
+  INDEX `idx_standard_document_id`(`document_id`) USING BTREE,
+  CONSTRAINT `fk_standard_document` FOREIGN KEY (`document_id`) REFERENCES `t_samp_document_main` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
 ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci COMMENT = '施工标准规范知识库' ROW_FORMAT = Dynamic;
 
 -- ----------------------------
 -- Records of t_samp_standard_base_info
 -- ----------------------------
-INSERT INTO `t_samp_standard_base_info` VALUES ('246a2423-3d2e-4985-98e8-0796a906a256', '测试 IntegrityError', NULL, 'TEST-001', '测试单位', '2026-01-22', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '国家标准', '施工', NULL, '现行', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-22 12:36:24', 'system', '2026-01-22 12:36:24');
-INSERT INTO `t_samp_standard_base_info` VALUES ('2877d896-cce3-42b3-ba06-6c17688c7d68', 'Updated Test Document', NULL, 'STD-001-UPDATED', 'Updated Authority', '2024-02-01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'Standard', 'IT', NULL, 'Valid', NULL, NULL, NULL, NULL, 'http://test.com/file_updated.pdf', NULL, 'Updated content', 'admin', '2026-01-22 13:57:49', 'system', '2026-01-22 13:57:53');
-INSERT INTO `t_samp_standard_base_info` VALUES ('3a2a4d64-a962-4319-a84b-f89a739a355f', '新建 文本文档', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:49:36', 'system', '2026-01-15 09:49:36');
-INSERT INTO `t_samp_standard_base_info` VALUES ('4c93b6eb-5d48-4d57-bf3f-1cf47e1c55ea', '新建 文本文档', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'http://192.168.91.15:19000/aidata/sampledata/uploads/20260122/a4d67bf7-0b07-4c6e-8bb8-6030f2331589.txt', NULL, '', 'admin', '2026-01-22 14:18:47', 'system', '2026-01-22 14:18:47');
-INSERT INTO `t_samp_standard_base_info` VALUES ('52e01c43-16a8-4005-9f61-936488130379', '建筑', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:35:39', 'system', '2026-01-15 09:35:39');
-INSERT INTO `t_samp_standard_base_info` VALUES ('55025214-b478-4110-8e2d-47617a9676a3', '测试全流程上传文档', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'http://192.168.91.15:19000/aidata/sampledata/uploads/20260122/8bf35980-a2e6-418c-9d79-049256d024f6.txt', NULL, '这是通过自动化脚本测试的上传文档内容摘要。', 'admin', '2026-01-22 14:16:07', 'system', '2026-01-22 14:16:07');
-INSERT INTO `t_samp_standard_base_info` VALUES ('956f2bb9-b4ef-4c2a-ada5-c964a7664261', '测试 IntegrityError', NULL, 'TEST-001', '测试单位', '2026-01-22', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '国家标准', '施工', NULL, '现行', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-22 12:28:53', 'system', '2026-01-22 12:28:53');
-INSERT INTO `t_samp_standard_base_info` VALUES ('e55e0d6e-c83e-474b-8b66-c41d19fe5e19', 'Updated Test Document', NULL, 'STD-001-UPDATED', 'Updated Authority', '2024-02-01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'Standard', 'IT', NULL, 'Valid', NULL, NULL, NULL, NULL, 'http://test.com/file_updated.pdf', NULL, 'Updated content', 'admin', '2026-01-22 13:52:26', 'system', '2026-01-22 13:52:30');
+INSERT INTO `t_samp_standard_base_info` VALUES ('246a2423-3d2e-4985-98e8-0796a906a256', '测试 IntegrityError', NULL, 'TEST-001', '测试单位', '2026-01-22', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '国家标准', '施工', NULL, '现行', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-22 12:36:24', 'system', '2026-01-22 12:36:24', 'doc-sample-3');
+INSERT INTO `t_samp_standard_base_info` VALUES ('2877d896-cce3-42b3-ba06-6c17688c7d68', 'Updated Test Document', NULL, 'STD-001-UPDATED', 'Updated Authority', '2024-02-01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'Standard', 'IT', NULL, 'Valid', NULL, NULL, NULL, NULL, 'http://test.com/file_updated.pdf', NULL, 'Updated content', 'admin', '2026-01-22 13:57:49', 'system', '2026-01-22 13:57:53', 'doc-sample-4');
+INSERT INTO `t_samp_standard_base_info` VALUES ('3a2a4d64-a962-4319-a84b-f89a739a355f', '新建 文本文档', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:49:36', 'system', '2026-01-15 09:49:36', 'doc-sample-5');
+INSERT INTO `t_samp_standard_base_info` VALUES ('4c93b6eb-5d48-4d57-bf3f-1cf47e1c55ea', '新建 文本文档', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'http://192.168.91.15:19000/aidata/sampledata/uploads/20260122/a4d67bf7-0b07-4c6e-8bb8-6030f2331589.txt', NULL, '', 'admin', '2026-01-22 14:18:47', 'system', '2026-01-22 14:18:47', 'doc-sample-6');
+INSERT INTO `t_samp_standard_base_info` VALUES ('52e01c43-16a8-4005-9f61-936488130379', '建筑', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-15 09:35:39', 'system', '2026-01-15 09:35:39', 'doc-sample-7');
+INSERT INTO `t_samp_standard_base_info` VALUES ('55025214-b478-4110-8e2d-47617a9676a3', '测试全流程上传文档', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'http://192.168.91.15:19000/aidata/sampledata/uploads/20260122/8bf35980-a2e6-418c-9d79-049256d024f6.txt', NULL, '这是通过自动化脚本测试的上传文档内容摘要。', 'admin', '2026-01-22 14:16:07', 'system', '2026-01-22 14:16:07', 'doc-sample-8');
+INSERT INTO `t_samp_standard_base_info` VALUES ('956f2bb9-b4ef-4c2a-ada5-c964a7664261', '测试 IntegrityError', NULL, 'TEST-001', '测试单位', '2026-01-22', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '国家标准', '施工', NULL, '现行', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'admin', '2026-01-22 12:28:53', 'system', '2026-01-22 12:28:53', 'doc-sample-9');
+INSERT INTO `t_samp_standard_base_info` VALUES ('e55e0d6e-c83e-474b-8b66-c41d19fe5e19', 'Updated Test Document', NULL, 'STD-001-UPDATED', 'Updated Authority', '2024-02-01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'Standard', 'IT', NULL, 'Valid', NULL, NULL, NULL, NULL, 'http://test.com/file_updated.pdf', NULL, 'Updated content', 'admin', '2026-01-22 13:52:26', 'system', '2026-01-22 13:52:30', 'doc-sample-10');
 
 -- ----------------------------
 -- Table structure for t_samp_tag_category

+ 2 - 2
src/app/base/mineru_connection.py

@@ -61,10 +61,10 @@ class MinerUManager:
                     params.append(error)
                 if md_url is not None:
                     updates.append("md_url = %s")
-                    params.append(md_url)
+                    params.append(self.minio_manager.get_relative_path(md_url))
                 if json_url is not None:
                     updates.append("json_url = %s")
-                    params.append(json_url)
+                    params.append(self.minio_manager.get_relative_path(json_url))
                 
                 if not updates:
                     return

+ 68 - 9
src/app/base/minio_connection.py

@@ -20,15 +20,18 @@ class MinioManager:
     """MinIO 管理器"""
     
     def __init__(self):
-        self.endpoint = config_handler.get("admin_app", "MINIO_ENDPOINT", "192.168.91.15:19000")
-        self.access_key = config_handler.get("admin_app", "MINIO_ACCESS_KEY", "minioadmin")
-        self.secret_key = config_handler.get("admin_app", "MINIO_SECRET_KEY", "minioadmin")
-        self.bucket_name = config_handler.get("admin_app", "MINIO_BUCKET_NAME", "lq-bucket")
+        self.endpoint = config_handler.get("admin_app", "MINIO_ENDPOINT")
+        self.access_key = config_handler.get("admin_app", "MINIO_ACCESS_KEY")
+        self.secret_key = config_handler.get("admin_app", "MINIO_SECRET_KEY")
+        self.bucket_name = config_handler.get("admin_app", "MINIO_BUCKET_NAME")
         self.secure = config_handler.get_bool("admin_app", "MINIO_USE_SSL", False)
         self.base_path = config_handler.get("admin_app", "MINIO_BASE_PATH", "lqadmin")
+        self.file_base_url = config_handler.get("admin_app", "FILE_BASE_URL", f"http://{self.endpoint}/{self.bucket_name}/{self.base_path}")
         
         self._client = None
-        logger.info(f"初始化 MinIO 管理器: endpoint={self.endpoint}, bucket={self.bucket_name}")
+        logger.info(f"初始化 MinIO 管理器: endpoint={self.endpoint}, bucket={self.bucket_name}, base_path={self.base_path}")
+        if not all([self.endpoint, self.access_key, self.secret_key, self.bucket_name]):
+            logger.error(f"MinIO 配置不完整: endpoint={self.endpoint}, bucket={self.bucket_name}, access_key={self.access_key[:4] if self.access_key else 'None'}...")
 
     @property
     def client(self) -> Minio:
@@ -63,12 +66,14 @@ class MinioManager:
             expires=timedelta(minutes=expires_minutes)
         )
         
-        # 构造访问 URL
-        file_url = f"http://{self.endpoint}/{self.bucket_name}/{object_name}"
+        # 构造访问 URL (相对于 base_path 的路径)
+        relative_path = f"/uploads/{datetime.now().strftime('%Y%m%d')}/{unique_id}{ext}"
+        full_file_url = f"{self.file_base_url}{relative_path}"
         
         return {
             "upload_url": upload_url,
-            "file_url": file_url,
+            "file_url": full_file_url,
+            "relative_path": relative_path,
             "object_name": object_name
         }
 
@@ -81,7 +86,61 @@ class MinioManager:
             len(file_content),
             content_type=content_type
         )
-        return f"http://{self.endpoint}/{self.bucket_name}/{object_name}"
+        # 返回相对路径
+        if object_name.startswith(f"{self.base_path}/"):
+            return object_name[len(self.base_path):]
+        return f"/{object_name}"
+
+    def get_relative_path(self, url: str) -> str:
+        """Strip the configured file_base_url prefix from a full URL.
+
+        Returns the path relative to file_base_url, or "" for falsy input.
+        URLs that do not start with file_base_url (including values that are
+        already relative paths) are returned unchanged.
+        """
+        if not url:
+            return ""
+        if url.startswith(self.file_base_url):
+            return url[len(self.file_base_url):]
+        return url
+
+    def get_full_url(self, relative_path: str) -> str:
+        """Join a stored relative path onto file_base_url.
+
+        Values already starting with "http" are treated as full URLs and
+        returned as-is; empty input yields "".
+        """
+        if not relative_path:
+            return ""
+        if relative_path.startswith("http"):
+            return relative_path
+        # Normalise: guarantee exactly one leading slash before joining.
+        if not relative_path.startswith("/"):
+            relative_path = "/" + relative_path
+        return f"{self.file_base_url}{relative_path}"
+
+    def get_object_content(self, file_url: str) -> Optional[str]:
+        """Fetch an object's text content by URL, trying several encodings
+        to cope with mojibake; returns None on any failure.
+
+        NOTE(review): this expects a FULL URL containing "/{bucket}/". Since
+        this commit stores relative paths in the DB, callers must convert via
+        get_full_url first or this will return None — confirm call sites.
+        """
+        try:
+            # Extract object_name from the URL.
+            # URL shape: http://{endpoint}/{bucket}/{object_name}
+            parts = file_url.split(f"/{self.bucket_name}/")
+            if len(parts) < 2:
+                logger.error(f"无效的 MinIO URL: {file_url}")
+                return None
+            
+            object_name = parts[1]
+            response = self.client.get_object(self.bucket_name, object_name)
+            try:
+                data = response.read()
+            finally:
+                # Always return the HTTP connection to the urllib3 pool.
+                response.close()
+                response.release_conn()
+
+            # Try common encodings in rough order of likelihood.
+            encodings = ['utf-8', 'gbk', 'utf-8-sig', 'gb18030', 'utf-16']
+            for enc in encodings:
+                try:
+                    return data.decode(enc)
+                except UnicodeDecodeError:
+                    continue
+            
+            # Last resort: decode lossily rather than fail outright.
+            return data.decode('utf-8', errors='ignore')
+        except Exception as e:
+            logger.error(f"获取 MinIO 对象内容失败: {e}, URL: {file_url}")
+            return None
 
 def get_minio_manager() -> MinioManager:
     """获取 MinIO 管理器单例"""

+ 444 - 0
src/app/base/pymilvus_store_database.py

@@ -0,0 +1,444 @@
+import os
+import re
+import json
+import hashlib
+import logging
+from typing import List, Dict, Any, Optional, Tuple
+
+from langchain_core.documents import Document
+from pymilvus import (
+    MilvusClient,
+    DataType,
+    Function,
+    FunctionType,
+)
+
+# 导入项目配置和连接
+from app.core.config import config_handler
+from app.base.milvus_connection import get_milvus_manager
+from app.base.embedding_connection import get_embedding_model
+
+logger = logging.getLogger(__name__)
+
+# =============================
+# 一、配置区 (从项目配置中读取默认值)
+# =============================
+
+# 默认处理目录,建议通过环境变量或配置修改
+ROOT_DIR = config_handler.get("admin_app", "MILVUS_IMPORT_ROOT", r"C:\Users\ZengChao\Desktop\新建文件夹")
+
+# ✅ 父表 / 子表
+PARENT_COLLECTION_NAME = "test_22_parent"
+CHILD_COLLECTION_NAME = "test_22_child"
+
+DENSE_DIM_FALLBACK = 1024
+CHUNK_ID_START = 0
+
+# ✅ 父段最大长度(超过就把父段切成多条父表记录,但它们 parent_id 相同)
+PARENT_MAX_CHARS = 6000
+
+# ✅ 标量字段(用于过滤)
+BASE_SCALAR_FIELDS = {
+    "is_deleted": False,
+    "parent_id": 0,  # ✅ 注意:这里最终会变成 “父段组ID”
+    "doc_id": "DOC_123",
+    "doc_version": 20260118,
+    "tags": "policy,hr",
+}
+
+# ✅ metadata(放 JSON metadata 字段)
+BASE_METADATA_JSON = {
+    "source_type": "pdf",
+    "source_uri": "s3://kb/a.pdf",
+}
+
+# =============================
+# 二、工具:Markdown 切块
+# =============================
+
+BLANK_SPLIT_RE = re.compile(r"\n\s*\n+")
+H1_RE = re.compile(r"^#\s+(.+?)\s*$", re.MULTILINE)
+
+
+def split_md_by_blank_lines(md: str) -> List[str]:
+    md = md.replace("\r\n", "\n").replace("\r", "\n")
+    parts = BLANK_SPLIT_RE.split(md)
+    return [p.strip() for p in parts if p.strip()]
+
+
+def is_heading_chunk(chunk: str):
+    first_line = chunk.split("\n", 1)[0].strip()
+    m = re.match(r"^(#{1,6})\s+(.+?)\s*$", first_line)
+    if not m:
+        return None
+    return len(m.group(1)), m.group(2).strip()
+
+
+def outline_path_str(path: List[str]) -> str:
+    return " > ".join(path)
+
+
+def guess_doc_name_from_filename(file_name: str) -> str:
+    return os.path.splitext(file_name)[0]
+
+
+def split_md_by_h1_sections(md: str) -> List[Tuple[str, str]]:
+    """
+    按 '# 一级标题' 切成父段:
+    return: [(h1_title, section_text), ...]
+    - 如果最开始有内容(第一个#之前),将其作为 "__PREAMBLE__" 段
+    - section_text 包含该 # 行本身 + 直到下一个 # 之前的所有内容
+    - 如果全文没有任何 #,则返回一个默认段 ("__NO_H1__", 全文)
+    """
+    md = md.replace("\r\n", "\n").replace("\r", "\n")
+
+    matches = list(H1_RE.finditer(md))
+    if not matches:
+        txt = md.strip()
+        if not txt:
+            return []
+        return [("__NO_H1__", txt)]
+
+    sections: List[Tuple[str, str]] = []
+    
+    # 检查第一个#之前是否有内容
+    first_match_start = matches[0].start()
+    preamble = md[:first_match_start].strip()
+    if preamble:
+        sections.append(("__PREAMBLE__", preamble))
+    
+    # 处理所有#标题段
+    for i, m in enumerate(matches):
+        title = m.group(1).strip()
+        start = m.start()
+        end = matches[i + 1].start() if i + 1 < len(matches) else len(md)
+        sec = md[start:end].strip()
+        if sec:
+            sections.append((title, sec))
+    return sections
+
+
+def make_parent_id(doc_id: str, doc_version: int, doc_name: str, h1_title: str, parent_seq: int) -> int:
+    """
+    ✅ 生成稳定 parent_id(父段ID)
+    同一个 # 一级标题段无论父表切成几条记录,都共享同一个 parent_id
+    """
+    raw = f"{doc_id}|{doc_version}|{doc_name}|{parent_seq}|{h1_title}".encode("utf-8")
+    return int(hashlib.sha1(raw).hexdigest()[:16], 16) & ((1 << 63) - 1)
+
+
+def split_text_by_max_chars(text: str, max_chars: int) -> List[str]:
+    """
+    Slice an over-long parent section into pieces of at most ``max_chars``.
+
+    Strategy:
+    - Prefer to cut on blank-line (paragraph) boundaries near the limit.
+    - Hard-cut only when a single paragraph itself exceeds ``max_chars``.
+
+    Returns the non-empty slices in order; [] for blank input.
+    """
+    text = (text or "").strip()
+    if not text:
+        return []
+    if len(text) <= max_chars:
+        return [text]
+
+    # Paragraph-level units: split on blank lines first.
+    chunks = split_md_by_blank_lines(text)
+    
+    result = []
+    current_slice = ""
+    
+    for chunk in chunks:
+        # A single paragraph longer than max_chars must be hard-cut.
+        if len(chunk) > max_chars:
+            # Flush whatever has accumulated so far.
+            if current_slice.strip():
+                result.append(current_slice.strip())
+                current_slice = ""
+            # Hard-cut the oversized paragraph into fixed-size windows.
+            start = 0
+            while start < len(chunk):
+                result.append(chunk[start : start + max_chars].strip())
+                start += max_chars
+        else:
+            # Try to append this paragraph to the current slice.
+            test_slice = current_slice + "\n\n" + chunk if current_slice else chunk
+            if len(test_slice) <= max_chars:
+                # Fits: keep accumulating.
+                current_slice = test_slice
+            else:
+                # Would overflow: emit the current slice, start a new one.
+                if current_slice.strip():
+                    result.append(current_slice.strip())
+                current_slice = chunk
+    
+    # Emit the trailing slice, if any.
+    if current_slice.strip():
+        result.append(current_slice.strip())
+    
+    return [s for s in result if s]
+
+
+def build_parent_and_child_documents_from_md(md_text: str, file_name: str) -> Tuple[List[Document], List[Document]]:
+    """
+    Build parent- and child-table Documents from one markdown file.
+
+    Splitting order:
+    1. Cut parent sections on '# ' level-1 headings.
+    2. Within each parent section, cut child chunks on blank lines.
+    3. Finally slice over-long parent sections into several parent records
+       that all share the same parent_id (the parent-group id).
+
+    Returns (parent_docs, child_docs).
+    """
+    doc_name = guess_doc_name_from_filename(file_name)
+
+    # 1) Cut parent sections on level-1 headings.
+    parent_sections = split_md_by_h1_sections(md_text)
+
+    parent_seq_to_parent_id: Dict[int, int] = {}
+    # Pre-compute every parent_id so parents and children agree on the id.
+    for parent_seq, (h1_title, sec_text) in enumerate(parent_sections):
+        parent_id = make_parent_id(
+            doc_id=str(BASE_SCALAR_FIELDS["doc_id"]),
+            doc_version=int(BASE_SCALAR_FIELDS["doc_version"]),
+            doc_name=doc_name,
+            h1_title=h1_title,
+            parent_seq=parent_seq,
+        )
+        parent_seq_to_parent_id[parent_seq] = parent_id
+
+    # 2) Cut child chunks (blank-line split) within each parent section.
+    child_docs: List[Document] = []
+    chunk_id_counter = CHUNK_ID_START
+
+    for parent_seq, (h1_title, sec_text) in enumerate(parent_sections):
+        parent_id = parent_seq_to_parent_id[parent_seq]
+        
+        # Blank-line split, restricted to this parent section's text.
+        chunks = split_md_by_blank_lines(sec_text)
+        heading_path: List[str] = []
+
+        for chunk in chunks:
+            # Track the heading breadcrumb (outline_path) per child chunk.
+            heading_info = is_heading_chunk(chunk)
+            if heading_info:
+                level, title = heading_info
+                parent_path = heading_path[: level - 1]
+                outline_path = outline_path_str(parent_path)
+                heading_path = parent_path + [title]
+            else:
+                outline_path = outline_path_str(heading_path)
+
+            scalar_md = dict(BASE_SCALAR_FIELDS)
+            scalar_md["chunk_id"] = chunk_id_counter
+            scalar_md["parent_id"] = int(parent_id)
+
+            # metadata JSON holds only: source_uri, source_type, doc_name,
+            # outline_path, chunk_id.
+            metadata_json = {
+                "source_uri": BASE_METADATA_JSON.get("source_uri", ""),
+                "source_type": BASE_METADATA_JSON.get("source_type", ""),
+                "doc_name": doc_name,
+                "outline_path": outline_path,
+                "chunk_id": chunk_id_counter,
+            }
+
+            child_docs.append(
+                Document(
+                    page_content=chunk,
+                    metadata={**scalar_md, "metadata": metadata_json},
+                )
+            )
+            chunk_id_counter += 1
+
+    # 3) Slice over-long parent sections into multiple parent records.
+    parent_docs: List[Document] = []
+
+    for parent_seq, (h1_title, sec_text) in enumerate(parent_sections):
+        parent_id = parent_seq_to_parent_id[parent_seq]
+        
+        # NOTE(review): slice_idx is never used below, so every slice of the
+        # same section gets chunk_id = CHUNK_ID_START + parent_seq, and these
+        # parent chunk_ids overlap the child chunk_id sequence — confirm
+        # this duplication is intended.
+        slices = split_text_by_max_chars(sec_text, PARENT_MAX_CHARS)
+        for slice_idx, slice_text in enumerate(slices):
+            scalar_md = dict(BASE_SCALAR_FIELDS)
+            scalar_md["chunk_id"] = CHUNK_ID_START + parent_seq
+            scalar_md["parent_id"] = int(parent_id)
+
+            # Synthetic section markers map to a readable outline_path.
+            if h1_title == "__PREAMBLE__":
+                outline_path = doc_name
+            elif h1_title == "__NO_H1__":
+                outline_path = ""
+            else:
+                outline_path = h1_title
+            
+            metadata_json = {
+                "source_uri": BASE_METADATA_JSON.get("source_uri", ""),
+                "source_type": BASE_METADATA_JSON.get("source_type", ""),
+                "doc_name": doc_name,
+                "outline_path": outline_path,
+                "chunk_id": CHUNK_ID_START + parent_seq,
+            }
+
+            parent_docs.append(
+                Document(
+                    page_content=slice_text,
+                    metadata={**scalar_md, "metadata": metadata_json},
+                )
+            )
+
+    return parent_docs, child_docs
+
+
+def save_docs_to_json(docs: List[Document], out_path: str) -> str:
+    if not docs:
+        return ""
+    docs_data = [{"page_content": d.page_content, "metadata": d.metadata} for d in docs]
+    with open(out_path, "w", encoding="utf-8") as f:
+        json.dump(docs_data, f, ensure_ascii=False, indent=2)
+    return out_path
+
+
+# =============================
+# 三、Milvus:建 collection(dense + BM25 + 标量字段 + JSON metadata)
+# =============================
+
+def detect_dense_dim(emb) -> int:
+    return len(emb.embed_query("dim probe"))
+
+
+def ensure_collection(client: MilvusClient, collection_name: str, dense_dim: int):
+    """
+    Create + index + load a hybrid (dense + BM25) collection if missing.
+
+    Schema: auto-id INT64 pk, analyzed VARCHAR `text` (BM25 input), a dense
+    FLOAT_VECTOR, a server-generated sparse BM25 vector, scalar filter
+    fields, and a JSON `metadata` field. Idempotent: returns immediately
+    when the collection already exists.
+    """
+    if client.has_collection(collection_name=collection_name):
+        return
+
+    # NOTE(review): pymilvus create_schema documents `enable_dynamic_field`
+    # (singular); confirm this plural kwarg is not silently ignored.
+    schema = client.create_schema(auto_id=True, enable_dynamic_fields=False)
+
+    schema.add_field("pk", DataType.INT64, is_primary=True, auto_id=True)
+
+    # The BM25 input field must be created with enable_analyzer=True.
+    schema.add_field("text", DataType.VARCHAR, max_length=65535, enable_analyzer=True)
+
+    schema.add_field("dense_vec", DataType.FLOAT_VECTOR, dim=dense_dim)
+    schema.add_field("sparse_bm25", DataType.SPARSE_FLOAT_VECTOR)
+
+    # Scalar fields used for filtering.
+    schema.add_field("is_deleted", DataType.BOOL)
+    schema.add_field("parent_id", DataType.INT64)  # parent_id = parent-group id
+    schema.add_field("doc_id", DataType.VARCHAR, max_length=256)
+    schema.add_field("doc_version", DataType.INT64)
+    schema.add_field("tags", DataType.VARCHAR, max_length=2048)
+
+    schema.add_field("metadata", DataType.JSON)
+
+    # Server-side BM25: Milvus derives sparse_bm25 from `text` automatically.
+    schema.add_function(
+        Function(
+            name="bm25_fn",
+            input_field_names=["text"],
+            output_field_names=["sparse_bm25"],
+            function_type=FunctionType.BM25,
+        )
+    )
+
+    client.create_collection(collection_name=collection_name, schema=schema)
+
+    index_params = client.prepare_index_params()
+    index_params.add_index(
+        field_name="dense_vec",
+        index_name="dense_idx",
+        index_type="AUTOINDEX",
+        metric_type="COSINE",
+    )
+    index_params.add_index(
+        field_name="sparse_bm25",
+        index_name="bm25_idx",
+        index_type="SPARSE_INVERTED_INDEX",
+        metric_type="BM25",
+        params={"inverted_index_algo": "DAAT_MAXSCORE"},
+    )
+
+    client.create_index(collection_name=collection_name, index_params=index_params)
+    client.load_collection(collection_name=collection_name)
+
+
+# =============================
+# 四、写入:dense 由 embedding 生成;BM25 由 Milvus 自动生成
+# =============================
+
+def docs_to_entities(docs: List[Document], emb) -> List[Dict[str, Any]]:
+    texts = [d.page_content for d in docs]
+    dense_vecs = emb.embed_documents(texts)
+
+    entities: List[Dict[str, Any]] = []
+    for d, vec in zip(docs, dense_vecs):
+        md = d.metadata or {}
+
+        entities.append(
+            {
+                "text": d.page_content,
+                "dense_vec": vec,
+                "is_deleted": bool(md.get("is_deleted", False)),
+                "parent_id": int(md.get("parent_id", 0)),
+                "doc_id": str(md.get("doc_id", "")),
+                "doc_version": int(md.get("doc_version", 0)),
+                "tags": str(md.get("tags", "")),
+                "metadata": md.get("metadata", {}) if isinstance(md.get("metadata", {}), dict) else {},
+            }
+        )
+    return entities
+
+
+def insert_docs(client: MilvusClient, emb, docs: List[Document], collection_name: str):
+    if not docs:
+        return
+    entities = docs_to_entities(docs, emb)
+    client.insert(collection_name=collection_name, data=entities)
+
+
+# =============================
+# 五、主程序:只负责入库(父表 + 子表)
+# =============================
+
+if __name__ == "__main__":
+    # Import entry point: walk ROOT_DIR/<folder>/*.md, split each file into
+    # parent/child Documents, dump the split result next to the source file
+    # for inspection, then insert into both collections.
+
+    # Use the project-wide embedding model.
+    emb = get_embedding_model()
+    try:
+        dense_dim = detect_dense_dim(emb)
+    except Exception:
+        # Probe failed (e.g. model unreachable): fall back to the default dim.
+        dense_dim = DENSE_DIM_FALLBACK
+
+    # Use the project-wide Milvus manager.
+    milvus_manager = get_milvus_manager()
+    client = milvus_manager.client
+
+    # Create both tables: parent + child.
+    ensure_collection(client, PARENT_COLLECTION_NAME, dense_dim=dense_dim)
+    ensure_collection(client, CHILD_COLLECTION_NAME, dense_dim=dense_dim)
+
+    if not os.path.exists(ROOT_DIR):
+        print(f"❌ 目录不存在:{ROOT_DIR}")
+    else:
+        # One directory level deep; only .md files are processed.
+        for folder_name in os.listdir(ROOT_DIR):
+            folder_path = os.path.join(ROOT_DIR, folder_name)
+            if not os.path.isdir(folder_path):
+                continue
+
+            for file_name in os.listdir(folder_path):
+                if not file_name.lower().endswith(".md"):
+                    continue
+
+                md_path = os.path.join(folder_path, file_name)
+                try:
+                    print(f"\n📄 正在处理:{md_path}")
+
+                    with open(md_path, "r", encoding="utf-8") as f:
+                        text = f.read()
+
+                    parent_docs, child_docs = build_parent_and_child_documents_from_md(text, file_name)
+
+                    # Optional: persist the split result to JSON for review.
+                    out_dir = os.path.dirname(md_path)
+                    base = os.path.splitext(os.path.basename(md_path))[0]
+                    save_docs_to_json(parent_docs, os.path.join(out_dir, f"{base}_parents.json"))
+                    save_docs_to_json(child_docs, os.path.join(out_dir, f"{base}_children.json"))
+
+                    # Write the parent table & the child table.
+                    insert_docs(client, emb, parent_docs, PARENT_COLLECTION_NAME)
+                    insert_docs(client, emb, child_docs, CHILD_COLLECTION_NAME)
+
+                    print(f"✅ 父表写入:parents={len(parent_docs)} -> {PARENT_COLLECTION_NAME}")
+                    print(f"✅ 子表写入:children={len(child_docs)} -> {CHILD_COLLECTION_NAME}")
+
+                except Exception as e:
+                    # Best-effort batch: one failed file must not stop the run.
+                    print(f"❌ 处理失败:{md_path}")
+                    print(e)

+ 2 - 0
src/app/config/config.ini

@@ -85,6 +85,8 @@ MINIO_SECRET_KEY=ilVITuWEmjG0YDBAWspSG6UniHpEoMDECSuCo0f1
 MINIO_BUCKET_NAME=aidata
 MINIO_USE_SSL=False
 MINIO_BASE_PATH=sampledata
+# 文件访问基础URL
+FILE_BASE_URL=http://192.168.91.15:19000/aidata/sampledata
 
 # MinERU 配置
 MINERU_ACCESS_KEY=

+ 7 - 2
src/app/core/config.py

@@ -10,6 +10,10 @@ import os
 import sys
 
 
+import logging
+
+logger = logging.getLogger(__name__)
+
 class ConfigHandler:
     def __init__(self, config_file=None):
         self.config = ConfigParser()
@@ -24,9 +28,10 @@ class ConfigHandler:
         # 确保路径存在
         if os.path.exists(config_file):
             self.config.read(config_file, encoding='utf-8')
-            print(f"✅ 配置文件加载成功: {config_file}")
+            logger.info(f"✅ 配置文件加载成功: {config_file}")
+            logger.info(f"加载的节: {self.config.sections()}")
         else:
-            print(f"⚠️  配置文件未找到: {config_file}")
+            logger.warning(f"⚠️  配置文件未找到: {config_file}")
     
     def get(self, section, option, default=None):
         """获取配置值"""

+ 12 - 24
src/app/sample/models/base_info.py

@@ -1,7 +1,7 @@
 """
 样本中心基本信息数据库模型
 """
-from sqlalchemy import Column, String, Text, Date, Enum, Integer
+from sqlalchemy import Column, String, Text, Date, Enum, Integer, DateTime, func
 from sqlalchemy.dialects.mysql import CHAR, LONGTEXT, TINYINT
 from app.models.base import BaseModel
 
@@ -18,20 +18,14 @@ class StandardBaseInfo(BaseModel):
     implementation_date = Column(Date, nullable=True, comment="实施日期")
     drafting_unit = Column(String(255), nullable=True, comment="主编单位")
     approving_department = Column(String(255), nullable=True, comment="批准部门")
-    participating_units_1 = Column(String(255), nullable=True, comment="参编单位_1,可多条,建议用JSON数组或拆分多个字段;此处为第一个参编单位")
-    participating_units_2 = Column(String(255), nullable=True, comment="参编单位_2,第二个参编单位")
-    participating_units_3 = Column(String(255), nullable=True, comment="参编单位_3,第三个参编单位")
-    participating_units_4 = Column(String(255), nullable=True, comment="参编单位_4,第四个参编单位")
+    participating_units = Column(String(255), nullable=True, comment="参编单位,可多条,建议用JSON数组或拆分多个字段;此处为第一个参编单位")
     document_type = Column(String(50), nullable=True, comment="文件类型,枚举:法律法规、国家标准、行业标准、企业标准、规范性文件、技术规范")
     professional_field = Column(String(50), nullable=True, comment="专业领域,枚举:法律、参考规范、行政法规、部门规章")
     engineering_phase = Column(String(50), nullable=True, comment="工程阶段,待补充具体枚举值,如:规划、设计、施工、运维等")
     validity = Column(String(20), nullable=True, comment="时效性,枚举:现行、已废止、被替代")
-    reference_basis_1 = Column(String(500), nullable=True, comment="参考依据_1")
-    reference_basis_2 = Column(String(500), nullable=True, comment="参考依据_2")
+    reference_basis = Column(String(500), nullable=True, comment="参考依据")
     source_url = Column(String(500), nullable=True, comment="文件来源网址,如:http://www.xxx.com/xxx/xxx/xxx.pdf")
-    file_id = Column(String(255), nullable=True, comment="文件ID(OSS上传后),OSS对象存储唯一标识,如:oss://bucket-name/path/to/file.pdf")
-    file_url = Column(String(500), nullable=True, comment="文件URL")
-    content = Column(LONGTEXT, nullable=True, comment="文件内容")
+    note = Column(String(500), nullable=True, comment="文件备注")
 
     def __repr__(self):
         return f"<StandardBaseInfo {self.chinese_name}>"
@@ -47,17 +41,13 @@ class ConstructionPlanBaseInfo(BaseModel):
     compiling_unit = Column(String(255), nullable=True, comment="编制单位")
     compiling_date = Column(Date, nullable=True, comment="编制日期")
     plan_summary = Column(Text, nullable=True, comment="方案概述")
-    compilation_basis_1 = Column(String(500), nullable=True, comment="编制依据_1")
-    compilation_basis_2 = Column(String(500), nullable=True, comment="编制依据_2")
-    compilation_basis_3 = Column(String(500), nullable=True, comment="编制依据_3")
-    compilation_basis_4 = Column(String(500), nullable=True, comment="编制依据_4")
-    compilation_basis_5 = Column(String(500), nullable=True, comment="编制依据_5")
-    compilation_basis_6 = Column(String(500), nullable=True, comment="编制依据_6")
-    compilation_basis_7 = Column(String(500), nullable=True, comment="编制依据_7")
-    compilation_basis_8 = Column(String(500), nullable=True, comment="编制依据_8")
-    compilation_basis_9 = Column(String(500), nullable=True, comment="编制依据_9")
-    file_url = Column(String(500), nullable=True, comment="文件URL")
-    content = Column(LONGTEXT, nullable=True, comment="文件内容")
+    compilation_basis = Column(Text, nullable=True, comment="编制依据(合并后)")
+    plan_category = Column(String(100), nullable=True, comment="方案类别")
+    level_1_classification = Column(String(100), nullable=True, default="施工方案", comment="一级分类")
+    level_2_classification = Column(String(100), nullable=True, comment="二级分类")
+    level_3_classification = Column(String(100), nullable=True, comment="三级分类")
+    level_4_classification = Column(String(100), nullable=True, comment="四级分类")
+    note = Column(String(500), nullable=True, comment="文件备注")
 
     def __repr__(self):
         return f"<ConstructionPlanBaseInfo {self.plan_name}>"
@@ -73,8 +63,7 @@ class OfficeRegulations(BaseModel):
     publish_date = Column(Date, nullable=True, comment="发布日期")
     effective_start_date = Column(Date, nullable=True, comment="时效开始日期")
     effective_end_date = Column(Date, nullable=True, comment="时效结束日期,若为长期有效,可设为 NULL 或 9999-12-31")
-    file_url = Column(String(500), nullable=True, comment="文件URL")
-    content = Column(LONGTEXT, nullable=True, comment="文件内容")
+    note = Column(String(500), nullable=True, comment="文件备注")
 
     def __repr__(self):
         return f"<OfficeRegulations {self.file_name}>"
@@ -105,7 +94,6 @@ class DocumentMain(BaseModel):
 
     id = Column(String(36), primary_key=True, comment="主键")
     source_type = Column(Enum('basis', 'work', 'job'), nullable=False, comment="所属类型")
-    source_id = Column(String(36), nullable=False, comment="所属ID")
     title = Column(String(255), nullable=False, comment="文档名称")
     conversion_status = Column(Integer, nullable=False, default=0, comment="状态: 0-待转换, 1-转换中, 2-完成, 3-失败")
     whether_to_enter = Column(Integer, nullable=False, default=0, comment="是否入库: 0-未入库, 1-已入库")

+ 8 - 2
src/app/sample/schemas/sample_schemas.py

@@ -24,14 +24,13 @@ class ConvertRequest(BaseModel):
 
 class DocumentAdd(BaseModel):
     title: str
-    content: str
+    note: Optional[str] = None
     primary_category_id: Optional[Any] = None
     secondary_category_id: Optional[Any] = None
     year: Optional[int] = None
     table_type: Optional[str] = "basis"
     # 新增编辑需要的字段
     id: Optional[str] = None
-    source_id: Optional[str] = None
     # 扩展字段 (子表特有属性)
     standard_no: Optional[str] = None
     issuing_authority: Optional[str] = None
@@ -41,6 +40,13 @@ class DocumentAdd(BaseModel):
     validity: Optional[str] = None
     project_name: Optional[str] = None
     project_section: Optional[str] = None
+    compilation_basis: Optional[str] = None
+    plan_summary: Optional[str] = None
+    plan_category: Optional[str] = None
+    level_1_classification: Optional[str] = "施工方案"
+    level_2_classification: Optional[str] = None
+    level_3_classification: Optional[str] = None
+    level_4_classification: Optional[str] = None
     # 文件相关字段
     file_url: Optional[str] = None
     json_url: Optional[str] = None

+ 22 - 1
src/app/services/image_service.py

@@ -9,6 +9,7 @@ from typing import Optional, List, Dict, Any, Tuple
 from datetime import datetime
 from app.base.async_mysql_connection import get_db_connection
 from app.base.minio_connection import get_minio_manager
+from app.services.task_service import task_service
 
 logger = logging.getLogger(__name__)
 
@@ -183,6 +184,11 @@ class ImageService:
             cursor.execute(list_sql, tuple(params + [page_size, offset]))
             images = cursor.fetchall()
             
+            # 处理 URL 转换
+            for item in images:
+                if item.get('image_url'):
+                    item['image_url'] = self.minio_manager.get_full_url(item['image_url'])
+            
             return {
                 "total": total,
                 "list": images,
@@ -206,7 +212,8 @@ class ImageService:
         try:
             image_id = str(uuid.uuid4())
             image_name = data.get('image_name')
-            image_url = data.get('image_url')
+            # 处理 URL 存储(转为相对路径)
+            image_url = self.minio_manager.get_relative_path(data.get('image_url'))
             image_type = data.get('image_type')
             description = data.get('description')
             
@@ -220,6 +227,13 @@ class ImageService:
                 ) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW())
             """
             cursor.execute(sql, (image_id, image_name, image_url, image_type, description, user_id, user_id))
+            
+            # 添加到任务管理中心 (类型为 image)
+            try:
+                await task_service.add_task(image_id, 'image')
+            except Exception as task_err:
+                logger.error(f"添加图片 {image_name} 到任务中心失败: {task_err}")
+                
             conn.commit()
             return True, "保存成功"
         except Exception as e:
@@ -239,6 +253,13 @@ class ImageService:
         cursor = conn.cursor()
         try:
             cursor.execute("DELETE FROM t_image_info WHERE id = %s", (image_id,))
+            
+            # 同步从任务管理中心删除
+            try:
+                await task_service.delete_task(image_id)
+            except Exception as task_err:
+                logger.error(f"从任务中心删除图片 {image_id} 失败: {task_err}")
+
             conn.commit()
             return True, "删除成功"
         except Exception as e:

+ 284 - 477
src/app/services/milvus_service.py

@@ -1,495 +1,302 @@
-"""
-Milvus Service:业务层(直接用 manager.client 调 Milvus 原生方法)
-"""
-from __future__ import annotations
-
-import sys
-import os
-
-# 添加src目录到Python路径
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../../..'))
 
+import time
+import re
+import hashlib
 import logging
-from typing import List, Dict, Any
-from datetime import datetime
+import json
+from typing import List, Dict, Any, Tuple, Optional
+from langchain_core.documents import Document
+from langchain_openai import OpenAIEmbeddings
+from pymilvus import MilvusClient, DataType, Function, FunctionType
 
-from app.base import get_milvus_manager, get_milvus_vectorstore, get_embedding_model
+from app.core.config import config_handler
 
 logger = logging.getLogger(__name__)
 
-
 class MilvusService:
-    def __init__(self):
-        self.client = get_milvus_manager().client
-        # 获取embedding model
-        self.emdmodel = get_embedding_model()
+    """Milvus 向量库服务类,实现父子块切分与混合检索存储"""
+    
+    def __init__(self, uri: str, db_name: str, parent_collection: str, child_collection: str):
+        self.client = MilvusClient(uri=uri, db_name=db_name)
+        self.parent_collection = parent_collection
+        self.child_collection = child_collection
+        self.emb = self._get_embeddings()
+        
+        # 配置参数
+        self.PARENT_MAX_CHARS = 6000
+        self.DENSE_DIM = 4096
+        self.H1_RE = re.compile(r"^#\s+(.+?)\s*$", re.MULTILINE)
+        self.BLANK_SPLIT_RE = re.compile(r"\n\s*\n+")
+
+    def has_collection(self, collection_name: str) -> bool:
+        """Return True if the named collection exists in Milvus."""
+        return self.client.has_collection(collection_name=collection_name)
+
+    def _get_embeddings(self) -> OpenAIEmbeddings:
+        """Build the OpenAI-compatible embedding client from config.
+
+        Values come from the "admin_app" config section; the hard-coded
+        fallbacks (internal IP, model name, dummy key) are development defaults.
+        """
+        return OpenAIEmbeddings(
+            base_url=config_handler.get("admin_app", "EMBEDDING_BASE_URL", "http://192.168.91.253:9003/v1"),
+            model=config_handler.get("admin_app", "EMBEDDING_MODEL", "Qwen3-Embedding-8B"),
+            api_key=config_handler.get("admin_app", "EMBEDDING_API_KEY", "dummy"),
+        )
 
-    def create_collection(self, name: str, dimension: int = 768, description: str = "", fields: List[Dict] = None) -> None:
-        """
-        创建 Milvus 集合
-        :param fields: 自定义字段列表,每个元素为 {"name": "age", "type": "INT64", ...}
-        """
-        if self.client.has_collection(name):
-            logger.info(f"Collection {name} already exists.")
-            return
+    # --- Chunk-splitting helpers ---
+    
+    def _split_md_by_blank_lines(self, md: str) -> List[str]:
+        """Normalize line endings, then split markdown into non-empty paragraphs on blank lines."""
+        md = md.replace("\r\n", "\n").replace("\r", "\n")
+        parts = self.BLANK_SPLIT_RE.split(md)
+        return [p.strip() for p in parts if p.strip()]
+
+    def _is_heading_chunk(self, chunk: str) -> Optional[Tuple[int, str]]:
+        """If the chunk's first line is a markdown heading (1-6 '#'), return (level, title); else None."""
+        first_line = chunk.split("\n", 1)[0].strip()
+        m = re.match(r"^(#{1,6})\s+(.+?)\s*$", first_line)
+        if not m:
+            return None
+        return len(m.group(1)), m.group(2).strip()
+
+    def _split_md_by_h1_sections(self, md: str) -> List[Tuple[str, str]]:
+        """Split markdown into parent sections by H1 ("# ") headings.
+
+        Returns a list of (title, section_text) pairs. Sentinel titles mark
+        special cases: "__NO_H1__" when the document has no H1 at all,
+        "__PREAMBLE__" for content preceding the first H1.
+        """
+        md = md.replace("\r\n", "\n").replace("\r", "\n")
+        matches = list(self.H1_RE.finditer(md))
+        if not matches:
+            txt = md.strip()
+            return [("__NO_H1__", txt)] if txt else []
+
+        sections = []
+        # Capture any content that appears before the first H1 heading
+        first_match_start = matches[0].start()
+        preamble = md[:first_match_start].strip()
+        if preamble:
+            sections.append(("__PREAMBLE__", preamble))
         
-        # 如果有自定义字段,使用 schema 创建
-        if fields:
-            from pymilvus import MilvusClient, DataType
-            
-            # 1. 创建 Schema
-            schema = MilvusClient.create_schema(
-                auto_id=True,
-                enable_dynamic_field=True,
-                description=description
-            )
-            
-            # 2. 添加必须的默认字段
-            schema.add_field(field_name="id", datatype=DataType.INT64, is_primary=True, auto_id=True)
-            schema.add_field(field_name="vector", datatype=DataType.FLOAT_VECTOR, dim=dimension)
-            # schema.add_field(field_name="sparse", datatype=DataType.SPARSE_FLOAT_VECTOR) # 如果需要混合检索,可能需要
-            
-            # 3. 添加用户自定义字段
-            # 映射字符串类型到 pymilvus DataType
-            type_map = {
-                "BOOL": DataType.BOOL,
-                "INT8": DataType.INT8,
-                "INT16": DataType.INT16,
-                "INT32": DataType.INT32,
-                "INT64": DataType.INT64,
-                "FLOAT": DataType.FLOAT,
-                "DOUBLE": DataType.DOUBLE,
-                "VARCHAR": DataType.VARCHAR,
-                "JSON": DataType.JSON,
-                "FLOAT_VECTOR": DataType.FLOAT_VECTOR
-            }
-            
-            for f in fields:
-                dtype = type_map.get(f.get("type", "").upper())
-                if not dtype:
-                    continue # 忽略未知类型
-                
-                kwargs = {
-                    "field_name": f.get("name"),
-                    "datatype": dtype,
-                    "description": f.get("description", "")
-                }
+        # Each H1 section runs from its heading to the next H1 (or end of text);
+        # the heading line itself is kept inside the section body.
+        for i, m in enumerate(matches):
+            title = m.group(1).strip()
+            start = m.start()
+            end = matches[i + 1].start() if i + 1 < len(matches) else len(md)
+            sec = md[start:end].strip()
+            if sec:
+                sections.append((title, sec))
+        return sections
+
+    def _make_parent_id(self, doc_id: str, doc_version: int, doc_name: str, h1_title: str, parent_seq: int) -> str:
+        """Derive a deterministic parent_id from document identity and section position.
+
+        SHA-1 is used only as a stable fingerprint here, not for security.
+        """
+        raw = f"{doc_id}|{doc_version}|{doc_name}|{parent_seq}|{h1_title}".encode("utf-8")
+        return hashlib.sha1(raw).hexdigest()
+
+    def _split_text_by_max_chars(self, text: str, max_chars: int) -> List[str]:
+        """Slice an over-long parent section into pieces of at most max_chars characters.
+
+        Paragraphs (blank-line chunks) are greedily packed into slices; a single
+        paragraph longer than max_chars is hard-split at character boundaries.
+        Returns the text unchanged (as a one-element list) when it already fits.
+        """
+        text = (text or "").strip()
+        if not text or len(text) <= max_chars:
+            return [text] if text else []
+
+        chunks = self._split_md_by_blank_lines(text)
+        result = []
+        current_slice = ""
+        
+        for chunk in chunks:
+            if len(chunk) > max_chars:
+                # Oversized paragraph: flush the current slice, then hard-split it.
+                if current_slice.strip():
+                    result.append(current_slice.strip())
+                    current_slice = ""
+                start = 0
+                while start < len(chunk):
+                    result.append(chunk[start : start + max_chars].strip())
+                    start += max_chars
+            else:
+                # Greedy packing: append the paragraph if it still fits, else start a new slice.
+                test_slice = current_slice + "\n\n" + chunk if current_slice else chunk
+                if len(test_slice) <= max_chars:
+                    current_slice = test_slice
+                else:
+                    if current_slice.strip():
+                        result.append(current_slice.strip())
+                    current_slice = chunk
+        
+        if current_slice.strip():
+            result.append(current_slice.strip())
+        return [s for s in result if s]
+
+    # --- 核心业务逻辑 ---
+
+    def ensure_collections(self):
+        """Create the parent and child collections (with indexes) if missing, then load them.
+
+        Both collections share the same schema: dense vector for semantic search
+        plus a BM25-generated sparse vector for keyword search (hybrid retrieval).
+        """
+        for col_name in [self.parent_collection, self.child_collection]:
+            if not self.client.has_collection(collection_name=col_name):
+                # NOTE(review): pymilvus spells this kwarg `enable_dynamic_field`
+                # (singular); verify the plural form is not silently ignored here.
+                schema = self.client.create_schema(auto_id=True, enable_dynamic_fields=False)
+                schema.add_field("pk", DataType.INT64, is_primary=True, auto_id=True)
+                schema.add_field("text", DataType.VARCHAR, max_length=65535, enable_analyzer=True)
+                schema.add_field("dense", DataType.FLOAT_VECTOR, dim=self.DENSE_DIM)
+                schema.add_field("sparse", DataType.SPARSE_FLOAT_VECTOR)
+                schema.add_field("document_id", DataType.VARCHAR, max_length=256)
+                schema.add_field("parent_id", DataType.VARCHAR, max_length=256)
+                schema.add_field("index", DataType.INT64)
+                schema.add_field("tag_list", DataType.VARCHAR, max_length=2048)
+                schema.add_field("permission", DataType.JSON)
+                schema.add_field("metadata", DataType.JSON)
+                schema.add_field("is_deleted", DataType.INT64)
+                schema.add_field("created_by", DataType.VARCHAR, max_length=256)
+                schema.add_field("created_time", DataType.INT64)
+                schema.add_field("updated_by", DataType.VARCHAR, max_length=256)
+                schema.add_field("updated_time", DataType.INT64)
+
+                # Server-side BM25 function derives the sparse vector from `text`;
+                # callers never supply `sparse` directly.
+                schema.add_function(Function(
+                    name="bm25_fn",
+                    input_field_names=["text"],
+                    output_field_names=["sparse"],
+                    function_type=FunctionType.BM25,
+                ))
                 
-                if dtype == DataType.VARCHAR:
-                    kwargs["max_length"] = f.get("max_length", 65535)
+                self.client.create_collection(collection_name=col_name, schema=schema)
                 
-                schema.add_field(**kwargs)
-            
-            # 4. 准备索引参数
-            index_params = self.client.prepare_index_params()
-            
-            # 5. 添加向量索引
-            index_params.add_index(
-                field_name="vector", 
-                index_type="AUTOINDEX",
-                metric_type="COSINE"
-            )
+                index_params = self.client.prepare_index_params()
+                index_params.add_index(field_name="dense", index_name="dense_idx", index_type="AUTOINDEX", metric_type="COSINE")
+                index_params.add_index(field_name="sparse", index_name="bm25_idx", index_type="SPARSE_INVERTED_INDEX", metric_type="BM25", params={"inverted_index_algo": "DAAT_MAXSCORE"})
+                self.client.create_index(collection_name=col_name, index_params=index_params)
             
-            # 6. 为自定义标量字段添加索引 (可选,这里为所有标量字段添加倒排索引以加速过滤)
-            for f in fields:
-                # VARCHAR/INT/BOOL 等支持索引
-                if f.get("type", "").upper() in ["VARCHAR", "INT64", "INT32", "BOOL"]:
-                    index_params.add_index(
-                        field_name=f.get("name"),
-                        index_type="INVERTED" # 标量字段倒排索引
-                    )
-
-            # 7. 创建集合
-            self.client.create_collection(
-                collection_name=name,
-                schema=schema,
-                index_params=index_params
-            )
-            
-        else:
-            # 使用简化的 create_collection API
-            self.client.create_collection(
-                collection_name=name,
-                dimension=dimension,
-                description=description,
-                auto_id=True,  # 自动生成 ID
-                id_type="int", # ID 类型
-                metric_type="COSINE" # 默认使用余弦相似度
-            )
+            self.client.load_collection(collection_name=col_name)
+
+    async def insert_knowledge(self, md_text: str, doc_info: Dict[str, Any]):
+        """执行切分、向量化并存入 Milvus"""
+        doc_id = doc_info['doc_id']
+        doc_name = doc_info.get('doc_name', 'unknown')
+        doc_version = doc_info.get('doc_version', 20260127)
+        tag_list = str(doc_info.get('tags') or '')
         
-        logger.info(f"Created collection {name} with dimension {dimension}")
-
-    def drop_collection(self, name: str) -> None:
-        """删除 Milvus 集合"""
-        if self.client.has_collection(name):
-            self.client.drop_collection(name)
-            logger.info(f"Dropped collection {name}")
-
-    def has_collection(self, name: str) -> bool:
-        """检查集合是否存在"""
-        return self.client.has_collection(name)
-
-    def get_collection_details(self) -> List[Dict[str, Any]]:
-        """
-        获取所有 Collections 详细信息
-        """
-        details: List[Dict[str, Any]] = []
+        # 公共字段准备
+        created_by = doc_info.get('created_by', 'system')
+        created_time = doc_info.get('created_time', int(time.time() * 1000))
+        updated_by = doc_info.get('updated_by', 'system')
+        updated_time = doc_info.get('updated_time', int(time.time() * 1000))
+        permission = doc_info.get('permission', {})
 
-        names = self.client.list_collections()
-
-        for name in names:
-            desc = self.client.describe_collection(collection_name=name)
-            stats = self.client.get_collection_stats(collection_name=name)
-            load_state = self.client.get_load_state(collection_name=name)
-
-            # ===== 时间戳转换(按你指定写法,无封装)=====
-            created_time = None
-            updated_time = None
-
-            if desc.get("created_timestamp") is not None:
-                ts_int = int(desc["created_timestamp"])
-                physical_ms = ts_int >> 18
-                created_time = datetime.fromtimestamp(physical_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")
-
-            if desc.get("update_timestamp") is not None:
-                ts_int = int(desc["update_timestamp"])
-                physical_ms = ts_int >> 18
-                updated_time = datetime.fromtimestamp(physical_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")
-
-            # ===== 数量:不保底(要求返回结构必须有 row_count)=====
-            entity_count = stats["row_count"]
-
-            # ===== 状态:不保底(要求返回结构必须有 state)=====
-            status = load_state["state"]
-
-            details.append(
-                {
-                    "name": name,
-                    "status": status,
-                    "entity_count": entity_count,
-                    "description": desc.get("description", ""),
-                    "created_time": created_time,
-                    "updated_time": updated_time,
-                }
-            )
-
-        logger.info(f"成功获取Collections详细信息,共{len(details)}个")
-        return details
-
-    def set_collection_state(self, name: str, action: str) -> Dict[str, Any]:
-        """
-        改变指定 Collection 的加载状态。
-
-        参数:
-        - name: 集合名称
-        - action: 操作,取值 'load' 或 'release'
-
-        返回:
-        - 包含集合名称和当前状态的字典,例如: {"name": name, "state": "Loaded"}
-        """
-        action_norm = (action or "").strip().lower()
-        if action_norm not in {"load", "release"}:
-            raise ValueError("action 必须为 'load' 或 'release'")
-
-        # 执行加载/释放
-        if action_norm == "load":
-            self.client.load_collection(collection_name=name)
-        else:
-            self.client.release_collection(collection_name=name)
-
-        # 返回最新状态
-        load_state = self.client.get_load_state(collection_name=name)
-        state = load_state.get("state") if isinstance(load_state, dict) else load_state
-        result = {"name": name, "state": state, "action": action_norm}
-        logger.info(f"集合 {name} 状态更新为 {state} (action={action_norm})")
-        return result
-
-    def delete_collection_if_empty(self, name: str) -> Dict[str, Any]:
-        """仅当集合内容为空时删除集合,否则抛出异常"""
-        stats = self.client.get_collection_stats(collection_name=name)
-        row_count = stats.get("row_count") if isinstance(stats, dict) else None
-        if row_count is None:
-            raise ValueError("无法获取集合行数,禁止删除")
-        if int(row_count) > 0:
-            raise ValueError("集合内容不为空,不能删除")
-
-        self.client.drop_collection(collection_name=name)
-        logger.info(f"集合 {name} 已删除")
-        return {"name": name, "deleted": True}
-
-    def get_collection_detail(self, name: str) -> Dict[str, Any]:
-        """获取单个集合的详细信息,包含schema、索引等所有desc字段"""
-        desc = self.client.describe_collection(collection_name=name)
-        stats = self.client.get_collection_stats(collection_name=name)
-        load_state = self.client.get_load_state(collection_name=name)
-
-        # 时间戳转换
-        created_time = None
-        updated_time = None
-
-        if desc.get("created_timestamp") is not None:
-            ts_int = int(desc["created_timestamp"])
-            physical_ms = ts_int >> 18
-            created_time = datetime.fromtimestamp(physical_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")
-
-        if desc.get("update_timestamp") is not None:
-            ts_int = int(desc["update_timestamp"])
-            physical_ms = ts_int >> 18
-            updated_time = datetime.fromtimestamp(physical_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")
-
-        entity_count = stats.get("row_count", 0)
-        status = load_state.get("state") if isinstance(load_state, dict) else load_state
-
-        # 提取字段schema
-        fields = []
-        if "fields" in desc:
-            for field in desc["fields"]:
-                field_info = {
-                    "name": field.get("name"),
-                    "type": str(field.get("type")),
-                    "description": field.get("description", ""),
-                    "is_primary": field.get("is_primary", False),
-                    "auto_id": field.get("auto_id"),
-                }
-                # 向量维度
-                if "params" in field and "dim" in field["params"]:
-                    field_info["dim"] = field["params"]["dim"]
-                # 字符串长度
-                if "params" in field and "max_length" in field["params"]:
-                    field_info["max_length"] = field["params"]["max_length"]
-                # 其他params
-                if "params" in field:
-                    field_info["params"] = field["params"]
-                fields.append(field_info)
-
-        # 提取索引信息
-        indices = []
-        
-        # 尝试从 describe_collection 结果中获取 (兼容旧逻辑)
-        if "indexes" in desc:
-            for idx in desc["indexes"]:
-                index_info = {
-                    "field_name": idx.get("field_name"),
-                    "index_name": idx.get("index_name"),
-                    "index_type": idx.get("index_type"),
-                    "metric_type": idx.get("metric_type"),
-                    "params": idx.get("params"),
-                }
-                indices.append(index_info)
-        
-        # 如果没有获取到索引信息,尝试主动查询 list_indexes
-        if not indices:
+        try:
+            # 1. 幂等处理:清理旧数据
             try:
-                # 获取索引列表 (通常返回索引名称列表)
-                index_names = self.client.list_indexes(collection_name=name)
-                if index_names:
-                    for idx_name in index_names:
-                        try:
-                            # 获取索引详情
-                            idx_desc = self.client.describe_index(collection_name=name, index_name=idx_name)
-                            if idx_desc:
-                                indices.append({
-                                    "field_name": idx_desc.get("field_name"),
-                                    "index_name": idx_desc.get("index_name"),
-                                    "index_type": idx_desc.get("index_type"),
-                                    "metric_type": idx_desc.get("metric_type"),
-                                    "params": idx_desc.get("params"),
-                                })
-                        except Exception:
-                            continue
+                self.client.delete(collection_name=self.parent_collection, filter=f"document_id == '{doc_id}'")
+                self.client.delete(collection_name=self.child_collection, filter=f"document_id == '{doc_id}'")
             except Exception as e:
-                logger.warning(f"Failed to list/describe indexes for {name}: {e}")
-
-        detail = {
-            "name": name,
-            "description": desc.get("description", ""),
-            "status": status,
-            "entity_count": entity_count,
-            "created_time": created_time,
-            "updated_time": updated_time,
-            "fields": fields,
-            "enable_dynamic_field": desc.get("enable_dynamic_field", False),
-            "consistency_level": desc.get("consistency_level"),
-            "num_shards": desc.get("num_shards"),
-            "num_partitions": desc.get("num_partitions"),
-            "indices": indices,
-            "properties": desc.get("properties"),
-            "aliases": desc.get("aliases", []),
-        }
-
-        logger.info(f"成功获取集合 {name} 的详细信息")
-        return detail
-
-    
-    def update_collection_description(self, name: str, description: str) -> Dict[str, Any]:
-        """使用 alter_collection_properties 更新集合描述"""
-        description = description or ""
-
-        # 1. 更新集合 description(唯一修改点)
-        self.client.alter_collection_properties(
-            collection_name=name,
-            properties={"collection.description": description},
-        )
-
-        # 2. 重新获取集合信息
-        desc = self.client.describe_collection(collection_name=name)
-        print(desc)
-        stats = self.client.get_collection_stats(collection_name=name)
-        load_state = self.client.get_load_state(collection_name=name)
-
-        # 3. 时间戳转换(Milvus TSO -> 物理时间)
-        def ts_to_str(ts):
-            if ts is None:
-                return None
-            ts_int = int(ts)
-            physical_ms = ts_int >> 18
-            return datetime.fromtimestamp(physical_ms / 1000).strftime("%Y-%m-%d %H:%M:%S")
-
-        created_time = ts_to_str(desc.get("created_timestamp"))
-        updated_time = ts_to_str(desc.get("update_timestamp"))
-
-        entity_count = stats.get("row_count") if isinstance(stats, dict) else None
-        status = load_state.get("state") if isinstance(load_state, dict) else load_state
-
-        return {
-            "name": name,
-            "status": status,
-            "entity_count": entity_count,
-            "description": desc.get("description", ""),
-            "created_time": created_time,
-            "updated_time": updated_time,
-        }
-
-    def hybrid_search(self, collection_name: str, query_text: str,
-                     top_k: int = 3, ranker_type: str = "weighted",
-                     dense_weight: float = 0.7, sparse_weight: float = 0.3):
-        """
-        混合搜索(参考 test_hybrid_v2.6.py 的实现)
-
-        Args:
-            param: 包含collection_name的参数字典
-            query_text: 查询文本
-            top_k: 返回结果数量
-            ranker_type: 重排序类型 "weighted" 或 "rrf"
-            dense_weight: 密集向量权重(当ranker_type="weighted"时使用)
-            sparse_weight: 稀疏向量权重(当ranker_type="weighted"时使用)
-
-        Returns:
-            List[Dict]: 搜索结果列表
-        """
-        try:
-            collection_name = collection_name
-
-            # 获取 vectorstore 实例(包含 Milvus 和 BM25BuiltInFunction)
-            vectorstore = get_milvus_vectorstore(
-                collection_name=collection_name,
-                consistency_level="Strong"
-            )
-
-            # 执行混合搜索 (完全按照 test_hybrid_v2.6.py 的逻辑)
-            if ranker_type == "weighted":
-                results = vectorstore.similarity_search(
-                    query=query_text,
-                    k=top_k,
-                    ranker_type="weighted",
-                    ranker_params={"weights": [dense_weight, sparse_weight]}
-                )
-            else:  # rrf
-                results = vectorstore.similarity_search(
-                    query=query_text,
-                    k=top_k,
-                    ranker_type="rrf",
-                    ranker_params={"k": 60}
-                )
-
-            # 格式化结果,保持与其他搜索方法一致
-            formatted_results = []
-            for doc in results:
-                formatted_results.append({
-                    'id': doc.metadata.get('pk', 0),
-                    'text_content': doc.page_content,
-                    'metadata': doc.metadata,
-                    'distance': 0.0,
-                    'similarity': 1.0
-                })
-
-            logger.info(f"Hybrid search returned {len(formatted_results)} results")
-            return formatted_results
+                logger.warning(f"清理旧数据失败 (doc_id: {doc_id}): {e}")
+                # 继续执行,可能是第一次入库
 
+            # 2. Split into parent (H1 section) and child (paragraph) chunks
+            try:
+                parent_sections = self._split_md_by_h1_sections(md_text)
+                parent_entities = []
+                child_entities = []
+                
+                # Pre-generate a stable parent_id for every H1 section so parent
+                # and child entities reference the same key.
+                parent_seq_to_id = {}
+                for seq, (title, _) in enumerate(parent_sections):
+                    parent_seq_to_id[seq] = self._make_parent_id(doc_id, doc_version, doc_name, title, seq)
+
+                # 3. Build child (paragraph-level) entities
+                for seq, (h1_title, sec_text) in enumerate(parent_sections):
+                    p_id = parent_seq_to_id[seq]
+                    chunks = self._split_md_by_blank_lines(sec_text)
+                    heading_path = []
+                    
+                    for c_idx, chunk in enumerate(chunks):
+                        h_info = self._is_heading_chunk(chunk)
+                        if h_info:
+                            # Maintain a heading breadcrumb: a level-N heading truncates
+                            # the path to N-1 entries before appending its own title.
+                            level, title = h_info
+                            heading_path = heading_path[:level-1] + [title]
+                        
+                        outline_path = " > ".join(heading_path)
+                        
+                        child_entities.append({
+                            "text": chunk,
+                            "is_deleted": 0,
+                            "parent_id": p_id,
+                            "document_id": doc_id,
+                            "index": int(c_idx),
+                            "tag_list": tag_list,
+                            "permission": permission,
+                            "metadata": {
+                                "doc_name": doc_name,
+                                "outline_path": outline_path,
+                                "doc_version": doc_version
+                            },
+                            "created_by": created_by,
+                            "created_time": created_time,
+                            "updated_by": updated_by,
+                            "updated_time": updated_time
+                        })
+
+                # 4. Build parent (section-level) entities; over-long sections are
+                # sliced to PARENT_MAX_CHARS, all slices sharing one parent_id.
+                for seq, (h1_title, sec_text) in enumerate(parent_sections):
+                    p_id = parent_seq_to_id[seq]
+                    slices = self._split_text_by_max_chars(sec_text, self.PARENT_MAX_CHARS)
+                    for s_idx, slice_text in enumerate(slices):
+                        parent_entities.append({
+                            "text": slice_text,
+                            "is_deleted": 0,
+                            "parent_id": p_id,
+                            "document_id": doc_id,
+                            "index": int(seq),
+                            "tag_list": tag_list,
+                            "permission": permission,
+                            "metadata": {
+                                "doc_name": doc_name,
+                                "outline_path": h1_title if h1_title not in ["__PREAMBLE__", "__NO_H1__"] else doc_name,
+                                "doc_version": doc_version
+                            },
+                            "created_by": created_by,
+                            "created_time": created_time,
+                            "updated_by": updated_by,
+                            "updated_time": updated_time
+                        })
+            except Exception as e:
+                logger.error(f"文档切分失败 (doc_id: {doc_id}): {e}")
+                raise RuntimeError(f"文档切分处理异常: {str(e)}")
+
+            # 5. Embed texts and insert into Milvus (parents first, then children).
+            # NOTE(review): the delete filter built earlier interpolates doc_id into
+            # the filter string — confirm doc_id is never user-controlled free text.
+            # Parent chunks
+            if parent_entities:
+                try:
+                    p_texts = [e['text'] for e in parent_entities]
+                    p_vecs = self.emb.embed_documents(p_texts)
+                    for e, v in zip(parent_entities, p_vecs): e['dense'] = v
+                except Exception as e:
+                    logger.error(f"父块向量化失败 (Embedding Service): {e}")
+                    raise RuntimeError(f"Embedding 服务调用失败: {str(e)}")
+                
+                try:
+                    self.client.insert(collection_name=self.parent_collection, data=parent_entities)
+                except Exception as e:
+                    logger.error(f"父块存入 Milvus 失败: {e}")
+                    raise RuntimeError(f"向量数据库写入失败(Parent): {str(e)}")
+                
+            # Child chunks
+            if child_entities:
+                try:
+                    c_texts = [e['text'] for e in child_entities]
+                    c_vecs = self.emb.embed_documents(c_texts)
+                    for e, v in zip(child_entities, c_vecs): e['dense'] = v
+                except Exception as e:
+                    logger.error(f"子块向量化失败 (Embedding Service): {e}")
+                    raise RuntimeError(f"Embedding 服务调用失败: {str(e)}")
+                
+                try:
+                    self.client.insert(collection_name=self.child_collection, data=child_entities)
+                except Exception as e:
+                    logger.error(f"子块存入 Milvus 失败: {e}")
+                    raise RuntimeError(f"向量数据库写入失败(Child): {str(e)}")
+
+            logger.info(f"Successfully entered knowledge base for doc_id: {doc_id}, parents: {len(parent_entities)}, children: {len(child_entities)}")
+            return len(parent_entities), len(child_entities)
+            
         except Exception as e:
-            logger.error(f"Error in hybrid search: {e}")
-            # 回退到传统的向量搜索
-            logger.info("Falling back to traditional vector search")
-
-
-# 可选:单例
-milvus_service = MilvusService()
-
-
-if __name__ == "__main__":
-    # 推荐这样跑:
-    # uv run python -m src.app.services.milvus_service
-    import json
-
-    service = MilvusService()
-    
-    # 测试混合搜索 hybrid_search
-    print("=" * 50)
-    print("测试混合检索 (Hybrid Search)")
-    print("=" * 50)
-    
-    try:
-        # 示例参数,需要根据实际情况修改
-        collection_name = "first_bfp_collection_status" 
-        query_text = "《公路水运工程临时用电技术规程》(JTT1499-2024)状态为现行"  # 修改为实际查询内容
-        
-        # 测试 weighted 模式
-        print("\n1. 测试 Weighted 重排序模式:")
-        print(f"   集合: {collection_name}")
-        print(f"   查询: {query_text}")
-        print(f"   密集权重: 0.7, 稀疏权重: 0.3")
-        
-        results_weighted = service.hybrid_search(
-            collection_name=collection_name,
-            query_text=query_text,
-            top_k=5,
-            ranker_type="weighted",
-            dense_weight=0.7,
-            sparse_weight=0.3
-        )
-        
-        print(f"\n   结果数量: {len(results_weighted)}")
-        for i, result in enumerate(results_weighted, 1):
-            print(f"   [{i}] ID: {result.get('id')}, Text: {result.get('text_content')[:50]}...")
-        
-        # 测试 RRF 模式
-        print("\n2. 测试 RRF (Reciprocal Rank Fusion) 重排序模式:")
-        print(f"   集合: {collection_name}")
-        print(f"   查询: {query_text}")
-        
-        results_rrf = service.hybrid_search(
-            collection_name=collection_name,
-            query_text=query_text,
-            top_k=5,
-            ranker_type="rrf"
-        )
-        
-        print(f"\n   结果数量: {len(results_rrf)}")
-        for i, result in enumerate(results_rrf, 1):
-            print(f"   [{i}] ID: {result.get('id')}, Text: {result.get('text_content')[:50]}...")
-        
-        print("\n✓ 混合检索测试完成")
-        
-    except Exception as e:
-        print(f"\n✗ 混合检索测试失败: {e}")
-        import traceback
-        traceback.print_exc()
-    
-    # 也可以查看集合详情
-    print("\n" + "=" * 50)
-    print("获取所有集合信息:")
-    print("=" * 50)
-    data = service.get_collection_details()
-    for item in data:
-        print(json.dumps(item, ensure_ascii=False, indent=2))
+            # Re-raise RuntimeErrors already wrapped above; wrap anything unexpected.
+            if not isinstance(e, RuntimeError):
+                logger.exception(f"入库流程发生未知异常 (doc_id: {doc_id})")
+                raise RuntimeError(f"入库未知错误: {str(e)}")
+            raise e
+
+# Global Milvus service singleton, configured from the "admin_app" config section.
+# NOTE(review): the fallback defaults embed internal IPs and test collection names
+# ("test_27_parent"/"test_27_child") — confirm these are development-only values.
+milvus_host = config_handler.get("admin_app", "MILVUS_HOST", "192.168.92.61")
+milvus_port = config_handler.get("admin_app", "MILVUS_PORT", "19530")
+milvus_service = MilvusService(
+    uri=f"http://{milvus_host}:{milvus_port}",
+    db_name=config_handler.get("admin_app", "MILVUS_DB", "lq_db"),
+    parent_collection=config_handler.get("admin_app", "PARENT_COLLECTION_NAME", "test_27_parent"),
+    child_collection=config_handler.get("admin_app", "CHILD_COLLECTION_NAME", "test_27_child")
+)

+ 315 - 107
src/app/services/sample_service.py

@@ -4,10 +4,13 @@
 """
 import logging
 import uuid
+from datetime import datetime
 from typing import Optional, List, Dict, Any, Tuple
 from app.base.async_mysql_connection import get_db_connection
 from app.base.minio_connection import get_minio_manager
 from app.core.config import config_handler
+from app.services.milvus_service import milvus_service
+from app.services.task_service import task_service
 
 logger = logging.getLogger(__name__)
 
@@ -31,6 +34,15 @@ class SampleService:
         """初始化服务"""
         # 使用统一的 MinIO 管理器
         self.minio_manager = get_minio_manager()
+        
+        # 使用全局 Milvus 服务
+        self.milvus_service = milvus_service
+        
+        # 确保集合已创建
+        try:
+            self.milvus_service.ensure_collections()
+        except Exception as e:
+            logger.error(f"初始化 Milvus 集合失败: {e}")
 
     async def get_upload_url(self, filename: str, content_type: str) -> Tuple[bool, str, Dict[str, Any]]:
         """获取 MinIO 预签名上传 URL"""
@@ -52,32 +64,107 @@ class SampleService:
         """
         conn = get_db_connection()
         if not conn:
-            return 0, "数据库连接失败"
+            return 0, "数据库连接失败,请检查数据库服务状态"
         
         cursor = conn.cursor()
+        success_count = 0
+        skipped_count = 0
+        failed_count = 0
+        error_details = []
         
         try:
-            # 1. 严格检查转换状态:只有 conversion_status = 2 (转换成功) 且 whether_to_enter = 0 (未入库) 的才能入库
-            check_sql = f"SELECT id, title FROM t_samp_document_main WHERE id IN ({','.join(['%s']*len(doc_ids))}) AND conversion_status = 2 AND whether_to_enter = 0"
-            cursor.execute(check_sql, tuple(doc_ids))
-            valid_docs = cursor.fetchall()
-            valid_ids = [doc['id'] for doc in valid_docs]
+            # 1. 获取所有选中的文档详情
+            placeholders = ','.join(['%s']*len(doc_ids))
+            fetch_sql = f"""
+                SELECT id, title, source_type, md_url, conversion_status, created_time 
+                FROM t_samp_document_main 
+                WHERE id IN ({placeholders})
+            """
+            cursor.execute(fetch_sql, tuple(doc_ids))
+            selected_docs = cursor.fetchall()
             
-            if not valid_ids:
-                return 0, "选中的文档中没有满足入库条件(已转换成功且未入库)的记录"
+            if not selected_docs:
+                return 0, "选中的文档在数据库中不存在"
+
+            # 2. 逐份处理
+            for doc in selected_docs:
+                doc_id = doc['id']
+                title = doc.get('title', '未命名文档')
+                status = doc.get('conversion_status')
+                md_url = doc.get('md_url')
+                
+                # A. 检查转换状态
+                if status != 2:
+                    reason = "尚未转换成功" if status == 0 else "正在转换中" if status == 1 else "转换失败"
+                    logger.warning(f"文档 {title}({doc_id}) 状态为 {status},跳过入库: {reason}")
+                    skipped_count += 1
+                    error_details.append(f"· {title}: {reason}")
+                    continue
+                
+                if not md_url:
+                    logger.warning(f"文档 {title}({doc_id}) 缺少 md_url,跳过入库")
+                    skipped_count += 1
+                    error_details.append(f"· {title}: 转换结果地址丢失")
+                    continue
+                
+                # B. 从 MinIO 获取 Markdown 内容
+                try:
+                    md_content = self.minio_manager.get_object_content(md_url)
+                    if not md_content:
+                        raise ValueError(f"无法从 MinIO 读取内容 (URL: {md_url})")
+                except Exception as minio_err:
+                    logger.error(f"读取文档 {title} 内容失败: {minio_err}")
+                    failed_count += 1
+                    error_details.append(f"· {title}: 读取云端文件失败")
+                    continue
+                
+                # C. 调用 MilvusService 进行切分和入库
+                try:
+                    # 准备元数据
+                    doc_info = {
+                        "doc_id": doc_id,
+                        "doc_name": title,
+                        "doc_version": int(doc['created_time'].strftime('%Y%m%d')) if doc.get('created_time') else 20260127,
+                        "tags": doc.get('source_type') or 'unknown'
+                    }
+                    await self.milvus_service.insert_knowledge(md_content, doc_info)
+                    
+                    # D. 添加到任务管理中心 (类型为 data)
+                    try:
+                        await task_service.add_task(doc_id, 'data')
+                    except Exception as task_err:
+                        logger.error(f"添加文档 {title} 到任务中心失败: {task_err}")
 
-            # 2. 更新状态为已入库
-            update_sql = f"UPDATE t_samp_document_main SET whether_to_enter = 1, updated_by = %s, updated_time = NOW() WHERE id IN ({','.join(['%s']*len(valid_ids))})"
-            cursor.execute(update_sql, (username, *valid_ids))
+                    # E. 更新数据库状态
+                    update_sql = "UPDATE t_samp_document_main SET whether_to_enter = 1, updated_by = %s, updated_time = NOW() WHERE id = %s"
+                    cursor.execute(update_sql, (username, doc_id))
+                    success_count += 1
+                    
+                except Exception as milvus_err:
+                    logger.exception(f"文档 {title} 写入向量库失败")
+                    failed_count += 1
+                    error_details.append(f"· {title}: 写入向量库失败 ({str(milvus_err)})")
+                    continue
             
-            affected_rows = cursor.rowcount
             conn.commit()
             
-            return affected_rows, f"成功入库 {affected_rows} 份文档"
+            # 构造详细的消息
+            msg = f"入库完成:成功 {success_count} 份"
+            if skipped_count > 0:
+                msg += f",跳过 {skipped_count} 份"
+            if failed_count > 0:
+                msg += f",失败 {failed_count} 份"
+            
+            if error_details:
+                detailed_msg = msg + "\n\n详情:\n" + "\n".join(error_details)
+                return success_count, detailed_msg
+            
+            return success_count, msg
+            
         except Exception as e:
-            logger.exception(f"文档批量入库失败: {e}")
+            logger.exception(f"文档批量入库异常: {e}")
             conn.rollback()
-            return 0, f"入库失败: {str(e)}"
+            return 0, f"操作异常: {str(e)}"
         finally:
             cursor.close()
             conn.close()
@@ -98,20 +185,12 @@ class SampleService:
             
             # 尝试同步删除子表中的数据
             try:
-                cursor.execute(f"SELECT source_type, source_id FROM t_samp_document_main WHERE id IN ({placeholders})", doc_ids)
-                docs = cursor.fetchall()
-                
-                for doc_row in docs:
-                    s_type = doc_row['source_type']
-                    s_id = doc_row['source_id']
-                    if s_type and s_id:
-                        sub_table = get_table_name(s_type)
-                        if sub_table:
-                            sub_sql = f"DELETE FROM {sub_table} WHERE id = %s"
-                            try:
-                                cursor.execute(sub_sql, (s_id,))
-                            except Exception as sub_e:
-                                logger.error(f"删除子表 {sub_table} 数据失败: {sub_e}")
+                for sub_table in TABLE_MAP.values():
+                    sub_sql = f"DELETE FROM {sub_table} WHERE id IN ({placeholders})"
+                    try:
+                        cursor.execute(sub_sql, doc_ids)
+                    except Exception as sub_e:
+                        logger.error(f"删除子表 {sub_table} 数据失败: {sub_e}")
             except Exception as sync_e:
                 logger.error(f"同步删除子表数据失败: {sync_e}")
             
@@ -120,6 +199,13 @@ class SampleService:
             cursor.execute(sql_main, doc_ids)
             affected_rows = cursor.rowcount
             
+            # 同步删除任务管理中心的数据
+            try:
+                for doc_id in doc_ids:
+                    await task_service.delete_task(doc_id)
+            except Exception as task_err:
+                logger.error(f"同步删除任务中心数据失败: {task_err}")
+
             conn.commit()
             
             return affected_rows, f"成功删除 {affected_rows} 条文档数据"
@@ -136,10 +222,14 @@ class SampleService:
         whether_to_enter: Optional[int] = None,
         keyword: Optional[str] = None,
         table_type: Optional[str] = None,
+        plan_category: Optional[str] = None,
+        level_2_classification: Optional[str] = None,
+        level_3_classification: Optional[str] = None,
+        level_4_classification: Optional[str] = None,
         page: int = 1,
         size: int = 50
     ) -> Tuple[List[Dict[str, Any]], int, int, int]:
-        """获取文档列表(从主表查询)"""
+        """获取文档列表 (支持关联查询子表)"""
         conn = get_db_connection()
         if not conn:
             return [], 0, 0, 0
@@ -150,31 +240,80 @@ class SampleService:
             where_clauses = []
             params = []
             
-            if table_type:
-                where_clauses.append("source_type = %s")
+            # 基础查询
+            if table_type and table_type in TABLE_MAP:
+                # 如果指定了类型,使用 LEFT JOIN 关联查询,以便搜索子表字段
+                sub_table = TABLE_MAP[table_type]
+                from_sql = f"t_samp_document_main m LEFT JOIN {sub_table} s ON m.id = s.id"
+                fields_sql = "m.*, s.*"  # 获取所有字段,包括子表字段
+                where_clauses.append("m.source_type = %s")
                 params.append(table_type)
+                order_sql = "m.created_time DESC"
+                title_field = "m.title"
+
+                # 施工方案特有的过滤字段
+                if table_type == 'work':
+                    if plan_category:
+                        where_clauses.append("s.plan_category = %s")
+                        params.append(plan_category)
+                    if level_2_classification:
+                        where_clauses.append("s.level_2_classification = %s")
+                        params.append(level_2_classification)
+                    if level_3_classification:
+                        where_clauses.append("s.level_3_classification = %s")
+                        params.append(level_3_classification)
+                    if level_4_classification:
+                        where_clauses.append("s.level_4_classification = %s")
+                        params.append(level_4_classification)
+                
+                # 特殊处理 id 冲突,确保返回的是主表 m.id
+                fields_sql = "m.*, s.*, m.id as id"
+            else:
+                from_sql = "t_samp_document_main"
+                fields_sql = "*"
+                order_sql = "created_time DESC"
+                title_field = "title"
+            
             if whether_to_enter is not None:
-                where_clauses.append("whether_to_enter = %s")
+                # 注意:此参数名为 whether_to_enter,但此处实际按 conversion_status(转换状态)过滤
+                where_clauses.append("conversion_status = %s")
                 params.append(whether_to_enter)
+            
             if keyword:
-                where_clauses.append("title LIKE %s")
+                where_clauses.append(f"{title_field} LIKE %s")
                 params.append(f"%{keyword}%")
             
             where_sql = " WHERE " + " AND ".join(where_clauses) if where_clauses else ""
             offset = (page - 1) * size
             
-            sql = f"SELECT * FROM t_samp_document_main {where_sql} ORDER BY created_time DESC LIMIT %s OFFSET %s"
+            sql = f"SELECT {fields_sql} FROM {from_sql} {where_sql} ORDER BY {order_sql} LIMIT %s OFFSET %s"
             params.extend([size, offset])
             
             logger.info(f"Executing SQL: {sql} with params: {params}")
             cursor.execute(sql, tuple(params))
             items = []
             for row in cursor.fetchall():
-                item = row # DictCursor already returns dict
+                item = row
+                # 处理 URL 转换
+                for key in ['file_url', 'md_url', 'json_url']:
+                    if item.get(key):
+                        item[key] = self.minio_manager.get_full_url(item[key])
+                
+                # 映射字段以适配前端通用显示
+                source_type = item.get('source_type')
+                if source_type == 'work':
+                    item['issuing_authority'] = item.get('compiling_unit')
+                    item['release_date'] = item.get('compiling_date')
+                elif source_type == 'job':
+                    item['issuing_authority'] = item.get('issuing_department')
+                    item['release_date'] = item.get('publish_date')
+                
                 # 格式化时间
-                for key in ['created_time', 'updated_time']:
+                for key in ['created_time', 'updated_time', 'release_date', 'publish_date', 'compiling_date']:
                     if item.get(key) and hasattr(item[key], 'isoformat'):
                         item[key] = item[key].isoformat()
+                    elif item.get(key) is not None:
+                        item[key] = str(item[key])
                 
                 # 增加格式化文件名供前端显示
                 if item.get('conversion_status') == 2:
@@ -185,7 +324,7 @@ class SampleService:
                 items.append(item)
             
             # 总数
-            count_sql = f"SELECT COUNT(*) as count FROM t_samp_document_main {where_sql}"
+            count_sql = f"SELECT COUNT(*) as count FROM {from_sql} {where_sql}"
             cursor.execute(count_sql, tuple(params[:-2]))
             res = cursor.fetchone()
             total = res['count'] if res else 0
@@ -195,7 +334,7 @@ class SampleService:
             res = cursor.fetchone()
             all_total = res['count'] if res else 0
             
-            cursor.execute("SELECT COUNT(*) as count FROM t_samp_document_main WHERE whether_to_enter = 1")
+            cursor.execute("SELECT COUNT(*) as count FROM t_samp_document_main WHERE conversion_status = 2")
             res = cursor.fetchone()
             total_entered = res['count'] if res else 0
             
@@ -208,7 +347,7 @@ class SampleService:
             conn.close()
     
     async def get_document_detail(self, doc_id: str) -> Optional[Dict[str, Any]]:
-        """获取文档详情"""
+        """获取文档详情 (关联查询子表)"""
         conn = get_db_connection()
         if not conn:
             return None
@@ -216,17 +355,52 @@ class SampleService:
         cursor = conn.cursor()
         
         try:
-            # 查询主表
+            # 1. 查询主表
             cursor.execute("SELECT * FROM t_samp_document_main WHERE id = %s", (doc_id,))
             doc = cursor.fetchone()
             if not doc:
                 return None
             
-            # 格式化时间
-            for key in ['created_time', 'updated_time']:
-                if doc.get(key) and hasattr(doc[key], 'isoformat'):
-                    doc[key] = doc[key].isoformat()
+            # 2. 根据 source_type 查询对应的子表信息
+            source_type = doc.get('source_type')
+            table_name = TABLE_MAP.get(source_type)
             
+            if table_name:
+                # 关联子表的所有字段
+                sub_sql = f"SELECT * FROM {table_name} WHERE id = %s"
+                cursor.execute(sub_sql, (doc_id,))
+                sub_data = cursor.fetchone()
+                
+                if sub_data:
+                    # 将子表字段合并到 doc 中,方便前端使用
+                    # 注意:如果字段名冲突,子表字段会覆盖主表字段(除了 id)
+                    sub_data.pop('id', None)
+                    
+                    # 特殊处理一些前端需要的映射字段
+                    if source_type == 'basis':
+                        doc['standard_no'] = sub_data.get('standard_number')
+                    elif source_type == 'work':
+                        doc['issuing_authority'] = sub_data.get('compiling_unit')
+                        doc['release_date'] = sub_data.get('compiling_date')
+                    elif source_type == 'job':
+                        doc['issuing_authority'] = sub_data.get('issuing_department')
+                        doc['release_date'] = sub_data.get('publish_date')
+
+                    doc.update(sub_data)
+            
+            # 格式化时间
+            for key in ['created_time', 'updated_time', 'release_date', 'publish_date', 'compiling_date', 'implementation_date']:
+                val = doc.get(key)
+                if val and hasattr(val, 'isoformat'):
+                    doc[key] = val.isoformat()
+                elif val is not None:
+                    doc[key] = str(val)
+            
+            # 处理 URL 转换
+            for key in ['file_url', 'md_url', 'json_url']:
+                if doc.get(key):
+                    doc[key] = self.minio_manager.get_full_url(doc[key])
+
             # 增加格式化文件名供前端显示
             if doc.get('conversion_status') == 2:
                 title = doc.get('title', 'document')
@@ -269,24 +443,26 @@ class SampleService:
         
         try:
             doc_id = str(uuid.uuid4())
-            source_id = str(uuid.uuid4())
             table_type = doc_data.get('table_type', 'basis')
             table_name = TABLE_MAP.get(table_type)
             
             # 安全转换字段
             release_date = self._to_date(doc_data.get('release_date'))
             
+            # 处理 URL 存储(转为相对路径)
+            file_url = self.minio_manager.get_relative_path(doc_data.get('file_url'))
+            
             # 1. 插入主表 (作为资产中心)
             cursor.execute(
                 """
                 INSERT INTO t_samp_document_main (
-                    id, title, source_type, source_id, file_url, 
+                    id, title, source_type, file_url, 
                     file_extension, created_by, updated_by, created_time, updated_time,
                     conversion_status
-                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW(), 0)
+                ) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW(), 0)
                 """,
                 (
-                    doc_id, doc_data.get('title'), table_type, source_id, doc_data.get('file_url'),
+                    doc_id, doc_data.get('title'), table_type, file_url,
                     doc_data.get('file_extension'), user_id, user_id
                 )
             )
@@ -294,20 +470,26 @@ class SampleService:
             # 2. 插入子表 (仅存储业务字段)
             if table_type == 'basis':
                 cursor.execute(
-                    f"INSERT INTO {table_name} (id, chinese_name, standard_number, issuing_authority, release_date, document_type, professional_field, validity, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())",
-                    (source_id, doc_data.get('title'), doc_data.get('standard_no'), doc_data.get('issuing_authority'), release_date, doc_data.get('document_type'), doc_data.get('professional_field'), doc_data.get('validity'), user_id)
+                    f"INSERT INTO {table_name} (id, chinese_name, standard_number, issuing_authority, release_date, document_type, professional_field, validity, note, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())",
+                    (doc_id, doc_data.get('title'), doc_data.get('standard_no'), doc_data.get('issuing_authority'), release_date, doc_data.get('document_type'), doc_data.get('professional_field'), doc_data.get('validity'), doc_data.get('note'), user_id)
                 )
             elif table_type == 'work':
                 cursor.execute(
-                    f"INSERT INTO {table_name} (id, plan_name, project_name, project_section, compiling_unit, compiling_date, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW())",
-                    (source_id, doc_data.get('title'), doc_data.get('project_name'), doc_data.get('project_section'), doc_data.get('issuing_authority'), release_date, user_id)
+                    f"INSERT INTO {table_name} (id, plan_name, project_name, project_section, compiling_unit, compiling_date, plan_summary, compilation_basis, plan_category, level_1_classification, level_2_classification, level_3_classification, level_4_classification, note, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())",
+                    (doc_id, doc_data.get('title'), doc_data.get('project_name'), doc_data.get('project_section'), doc_data.get('issuing_authority'), release_date, doc_data.get('plan_summary'), doc_data.get('compilation_basis'), doc_data.get('plan_category'), doc_data.get('level_1_classification'), doc_data.get('level_2_classification'), doc_data.get('level_3_classification'), doc_data.get('level_4_classification'), doc_data.get('note'), user_id)
                 )
             elif table_type == 'job':
                 cursor.execute(
-                    f"INSERT INTO {table_name} (id, file_name, issuing_department, document_type, publish_date, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, NOW(), NOW())",
-                    (source_id, doc_data.get('title'), doc_data.get('issuing_authority'), doc_data.get('document_type'), release_date, user_id)
+                    f"INSERT INTO {table_name} (id, file_name, issuing_department, document_type, publish_date, note, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW())",
+                    (doc_id, doc_data.get('title'), doc_data.get('issuing_authority'), doc_data.get('document_type'), release_date, doc_data.get('note'), user_id)
                 )
             
+            # 3. 添加到任务管理中心 (类型为 data)
+            try:
+                await task_service.add_task(doc_id, 'data')
+            except Exception as task_err:
+                logger.error(f"添加文档 {doc_data.get('title')} 到任务中心失败: {task_err}")
+
             conn.commit()
             return True, "文档添加成功", doc_id
         except Exception as e:
@@ -328,13 +510,15 @@ class SampleService:
         
         try:
             doc_id = doc_data.get('id')
-            source_id = doc_data.get('source_id')
             table_type = doc_data.get('table_type', 'basis')
             table_name = TABLE_MAP.get(table_type)
             
             # 安全转换字段
             release_date = self._to_date(doc_data.get('release_date'))
             
+            # 处理 URL 存储(转为相对路径)
+            file_url = self.minio_manager.get_relative_path(doc_data.get('file_url'))
+            
             # 1. 更新主表
             cursor.execute(
                 """
@@ -344,7 +528,7 @@ class SampleService:
                 WHERE id = %s
                 """,
                 (
-                    doc_data.get('title'), doc_data.get('file_url'), doc_data.get('file_extension'),
+                    doc_data.get('title'), file_url, doc_data.get('file_extension'),
                     updater_id, doc_id
                 )
             )
@@ -352,18 +536,18 @@ class SampleService:
             # 2. 更新子表
             if table_type == 'basis':
                 cursor.execute(
-                    f"UPDATE {table_name} SET chinese_name = %s, standard_number = %s, issuing_authority = %s, release_date = %s, document_type = %s, professional_field = %s, validity = %s, updated_by = %s, updated_time = NOW() WHERE id = %s",
-                    (doc_data.get('title'), doc_data.get('standard_no'), doc_data.get('issuing_authority'), release_date, doc_data.get('document_type'), doc_data.get('professional_field'), doc_data.get('validity'), updater_id, source_id)
+                    f"UPDATE {table_name} SET chinese_name = %s, standard_number = %s, issuing_authority = %s, release_date = %s, document_type = %s, professional_field = %s, validity = %s, note = %s, updated_by = %s, updated_time = NOW() WHERE id = %s",
+                    (doc_data.get('title'), doc_data.get('standard_no'), doc_data.get('issuing_authority'), release_date, doc_data.get('document_type'), doc_data.get('professional_field'), doc_data.get('validity'), doc_data.get('note'), updater_id, doc_id)
                 )
             elif table_type == 'work':
                 cursor.execute(
-                    f"UPDATE {table_name} SET plan_name = %s, project_name = %s, project_section = %s, compiling_unit = %s, compiling_date = %s, updated_by = %s, updated_time = NOW() WHERE id = %s",
-                    (doc_data.get('title'), doc_data.get('project_name'), doc_data.get('project_section'), doc_data.get('issuing_authority'), release_date, updater_id, source_id)
+                    f"UPDATE {table_name} SET plan_name = %s, project_name = %s, project_section = %s, compiling_unit = %s, compiling_date = %s, plan_summary = %s, compilation_basis = %s, plan_category = %s, level_1_classification = %s, level_2_classification = %s, level_3_classification = %s, level_4_classification = %s, note = %s, updated_by = %s, updated_time = NOW() WHERE id = %s",
+                    (doc_data.get('title'), doc_data.get('project_name'), doc_data.get('project_section'), doc_data.get('issuing_authority'), release_date, doc_data.get('plan_summary'), doc_data.get('compilation_basis'), doc_data.get('plan_category'), doc_data.get('level_1_classification'), doc_data.get('level_2_classification'), doc_data.get('level_3_classification'), doc_data.get('level_4_classification'), doc_data.get('note'), updater_id, doc_id)
                 )
             elif table_type == 'job':
                 cursor.execute(
-                    f"UPDATE {table_name} SET file_name = %s, issuing_department = %s, document_type = %s, publish_date = %s, updated_by = %s, updated_time = NOW() WHERE id = %s",
-                    (doc_data.get('title'), doc_data.get('issuing_authority'), doc_data.get('document_type'), release_date, updater_id, source_id)
+                    f"UPDATE {table_name} SET file_name = %s, issuing_department = %s, document_type = %s, publish_date = %s, note = %s, updated_by = %s, updated_time = NOW() WHERE id = %s",
+                    (doc_data.get('title'), doc_data.get('issuing_authority'), doc_data.get('document_type'), release_date, doc_data.get('note'), updater_id, doc_id)
                 )
 
             conn.commit()
@@ -404,8 +588,9 @@ class SampleService:
                 fields = """
                     s.id, s.chinese_name as title, s.standard_number as standard_no, 
                     s.issuing_authority, s.release_date, s.document_type, 
-                    s.professional_field, s.validity, s.created_by, s.created_time,
-                    m.file_url, m.conversion_status, m.md_url, m.json_url, m.id as doc_id
+                    s.professional_field, s.validity, s.note, s.created_by, s.created_time,
+                    s.updated_by, s.updated_time,
+                    m.file_url, m.conversion_status, m.md_url, m.json_url
                 """
                 field_map = {
                     'title': 's.chinese_name',
@@ -420,15 +605,24 @@ class SampleService:
                 table_name = "t_samp_construction_plan_base_info"
                 fields = """
                     s.id, s.plan_name as title, NULL as standard_no, 
+                    s.project_name, s.project_section,
                     s.compiling_unit as issuing_authority, s.compiling_date as release_date, 
                     NULL as document_type, NULL as professional_field, NULL as validity, 
-                    s.created_by, s.created_time,
-                    m.file_url, m.conversion_status, m.md_url, m.json_url, m.id as doc_id
+                    s.plan_summary, s.compilation_basis,
+                    s.plan_category, s.level_1_classification, s.level_2_classification,
+                    s.level_3_classification, s.level_4_classification,
+                    s.note, s.created_by, s.created_time, s.updated_by, s.updated_time,
+                    m.file_url, m.conversion_status, m.md_url, m.json_url
                 """
                 field_map = {
                     'title': 's.plan_name',
                     'issuing_authority': 's.compiling_unit',
-                    'release_date': 's.compiling_date'
+                    'release_date': 's.compiling_date',
+                    'plan_category': 's.plan_category',
+                    'level_1_classification': 's.level_1_classification',
+                    'level_2_classification': 's.level_2_classification',
+                    'level_3_classification': 's.level_3_classification',
+                    'level_4_classification': 's.level_4_classification'
                 }
             elif type == 'job':
                 table_name = "t_samp_office_regulations"
@@ -436,8 +630,8 @@ class SampleService:
                     s.id, s.file_name as title, NULL as standard_no, 
                     s.issuing_department as issuing_authority, s.publish_date as release_date, 
                     s.document_type, NULL as professional_field, NULL as validity, 
-                    s.created_by, s.created_time,
-                    m.file_url, m.conversion_status, m.md_url, m.json_url, m.id as doc_id
+                    s.note, s.created_by, s.created_time, s.updated_by, s.updated_time,
+                    m.file_url, m.conversion_status, m.md_url, m.json_url
                 """
                 field_map = {
                     'title': 's.file_name',
@@ -480,19 +674,25 @@ class SampleService:
             sql = f"""
                 SELECT {fields} 
                 FROM {table_name} s
-                LEFT JOIN t_samp_document_main m ON s.id = m.source_id AND m.source_type = %s
+                LEFT JOIN t_samp_document_main m ON s.id = m.id
                 {where_sql} 
                 ORDER BY s.created_time DESC 
                 LIMIT %s OFFSET %s
             """
-            params = [type] + params + [size, offset]
+            params = params + [size, offset]
             
             cursor.execute(sql, tuple(params))
             items = cursor.fetchall()
             
+            # 处理 URL 转换
+            for item in items:
+                for key in ['file_url', 'md_url', 'json_url']:
+                    if item.get(key):
+                        item[key] = self.minio_manager.get_full_url(item[key])
+            
             # 总数
             count_sql = f"SELECT COUNT(*) as count FROM {table_name} s {where_sql}"
-            cursor.execute(count_sql, tuple(params[1:-2]))
+            cursor.execute(count_sql, tuple(params[:-2]))
             res = cursor.fetchone()
             total = res['count'] if res else 0
             
@@ -597,6 +797,7 @@ class SampleService:
 
     async def add_basic_info(self, type: str, data: Dict[str, Any], user_id: str) -> Tuple[bool, str]:
         """新增基本信息"""
+        logger.info(f"Adding basic info for type {type}: {data}")
         conn = get_db_connection()
         if not conn:
             return False, "数据库连接失败"
@@ -607,7 +808,6 @@ class SampleService:
             if not table_name:
                 return False, "无效的类型"
             
-            source_id = str(uuid.uuid4())
             doc_id = str(uuid.uuid4())
             file_url = data.get('file_url')
             file_extension = file_url.split('.')[-1] if file_url and '.' in file_url else None
@@ -616,34 +816,40 @@ class SampleService:
             cursor.execute(
                 """
                 INSERT INTO t_samp_document_main (
-                    id, title, source_type, source_id, file_url, 
+                    id, title, source_type, file_url, 
                     file_extension, created_by, updated_by, created_time, updated_time,
                     conversion_status
-                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW(), 0)
+                ) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW(), 0)
                 """,
                 (
-                    doc_id, data.get('title'), type, source_id, file_url,
+                    doc_id, data.get('title'), type, file_url,
                     file_extension, user_id, user_id
                 )
             )
             
             # 2. 插入子表 (移除 file_url,因为它现在只存储在主表中)
             if type == 'basis':
-                sql = f"INSERT INTO {table_name} (id, chinese_name, standard_number, issuing_authority, release_date, document_type, professional_field, validity, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())"
-                params = (source_id, data.get('title'), data.get('standard_no'), data.get('issuing_authority'), self._to_date(data.get('release_date')), data.get('document_type'), data.get('professional_field'), data.get('validity', '现行'), user_id)
+                sql = f"INSERT INTO {table_name} (id, chinese_name, standard_number, issuing_authority, release_date, document_type, professional_field, validity, note, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())"
+                params = (doc_id, data.get('title'), data.get('standard_no'), data.get('issuing_authority'), self._to_date(data.get('release_date')), data.get('document_type'), data.get('professional_field'), data.get('validity', '现行'), data.get('note'), user_id)
             elif type == 'work':
-                sql = f"INSERT INTO {table_name} (id, plan_name, project_name, project_section, compiling_unit, compiling_date, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW())"
-                params = (source_id, data.get('title'), data.get('project_name'), data.get('project_section'), data.get('issuing_authority'), self._to_date(data.get('release_date')), user_id)
+                sql = f"INSERT INTO {table_name} (id, plan_name, project_name, project_section, compiling_unit, compiling_date, plan_summary, compilation_basis, plan_category, level_1_classification, level_2_classification, level_3_classification, level_4_classification, note, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), NOW())"
+                params = (doc_id, data.get('title'), data.get('project_name'), data.get('project_section'), data.get('issuing_authority'), self._to_date(data.get('release_date')), data.get('plan_summary'), data.get('compilation_basis'), data.get('plan_category'), data.get('level_1_classification'), data.get('level_2_classification'), data.get('level_3_classification'), data.get('level_4_classification'), data.get('note'), user_id)
             elif type == 'job':
-                sql = f"INSERT INTO {table_name} (id, file_name, issuing_department, document_type, publish_date, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, NOW(), NOW())"
-                params = (source_id, data.get('title'), data.get('issuing_authority'), data.get('document_type'), self._to_date(data.get('release_date')), user_id)
+                sql = f"INSERT INTO {table_name} (id, file_name, issuing_department, document_type, publish_date, note, created_by, created_time, updated_time) VALUES (%s, %s, %s, %s, %s, %s, %s, NOW(), NOW())"
+                params = (doc_id, data.get('title'), data.get('issuing_authority'), data.get('document_type'), self._to_date(data.get('release_date')), data.get('note'), user_id)
             else:
                 return False, "不支持的类型"
             
             cursor.execute(sql, params)
             
+            # 3. 添加到任务管理中心 (类型为 data)
+            try:
+                await task_service.add_task(doc_id, 'data')
+            except Exception as task_err:
+                logger.error(f"添加基本信息 {data.get('title')} 到任务中心失败: {task_err}")
+
             conn.commit()
-            return True, "新增成功"
+            return True, "新增成功", doc_id
         except Exception as e:
             logger.exception("新增基本信息失败")
             conn.rollback()
@@ -654,6 +860,7 @@ class SampleService:
 
     async def edit_basic_info(self, type: str, info_id: str, data: Dict[str, Any], updater_id: str) -> Tuple[bool, str]:
         """编辑基本信息"""
+        logger.info(f"Editing basic info for type {type}, id {info_id}: {data}")
         conn = get_db_connection()
         if not conn:
             return False, "数据库连接失败"
@@ -664,7 +871,8 @@ class SampleService:
             if not table_name:
                 return False, "无效的类型"
             
-            file_url = data.get('file_url')
+            # 处理 URL 存储(转为相对路径)
+            file_url = self.minio_manager.get_relative_path(data.get('file_url'))
             file_extension = file_url.split('.')[-1] if file_url and '.' in file_url else None
 
             # 1. 更新主表 (解耦触发器)
@@ -672,9 +880,9 @@ class SampleService:
                 """
                 UPDATE t_samp_document_main 
                 SET title = %s, file_url = %s, file_extension = %s, updated_by = %s, updated_time = NOW()
-                WHERE source_id = %s AND source_type = %s
+                WHERE id = %s
                 """,
-                (data.get('title'), file_url, file_extension, updater_id, info_id, type)
+                (data.get('title'), file_url, file_extension, updater_id, info_id)
             )
 
             # 2. 更新子表 (移除 file_url)
@@ -685,7 +893,7 @@ class SampleService:
                     document_type = %s, professional_field = %s, validity = %s, 
                     english_name = %s, implementation_date = %s, drafting_unit = %s, 
                     approving_department = %s, participating_units = %s, engineering_phase = %s, 
-                    reference_basis = %s, source_url = %s,
+                    reference_basis = %s, source_url = %s, note = %s,
                     updated_by = %s, updated_time = NOW() 
                 WHERE id = %s
                 """
@@ -694,43 +902,37 @@ class SampleService:
                     data.get('document_type'), data.get('professional_field'), data.get('validity'),
                     data.get('english_name'), self._to_date(data.get('implementation_date')), data.get('drafting_unit'),
                     data.get('approving_department'), data.get('participating_units'), data.get('engineering_phase'),
-                    data.get('reference_basis'), data.get('source_url'),
+                    data.get('reference_basis'), data.get('source_url'), data.get('note'),
                     updater_id, info_id
                 )
             elif type == 'work':
-                # 构造 compilation_basis 更新部分
-                basis_updates = ", ".join([f"compilation_basis_{i} = %s" for i in range(1, 10)])
-                
                 sql = f"""
                 UPDATE {table_name} 
                 SET plan_name = %s, project_name = %s, project_section = %s, compiling_unit = %s, compiling_date = %s, 
-                    plan_summary = %s, {basis_updates},
-                    updated_by = %s, updated_time = NOW() 
+                    plan_summary = %s, compilation_basis = %s, plan_category = %s, 
+                    level_1_classification = %s, level_2_classification = %s, level_3_classification = %s, level_4_classification = %s,
+                    note = %s, updated_by = %s, updated_time = NOW() 
                 WHERE id = %s
                 """
                 
-                # 准备 compilation_basis 参数
-                basis_params = [data.get(f'compilation_basis_{i}') for i in range(1, 10)]
-                
-                params = [
+                params = (
                     data.get('title'), data.get('project_name'), data.get('project_section'), data.get('issuing_authority'), self._to_date(data.get('release_date')),
-                    data.get('plan_summary')
-                ] + basis_params + [updater_id, info_id]
-                
-                # 转换为 tuple
-                params = tuple(params)
+                    data.get('plan_summary'), data.get('compilation_basis'), data.get('plan_category'),
+                    data.get('level_1_classification'), data.get('level_2_classification'), data.get('level_3_classification'), data.get('level_4_classification'),
+                    data.get('note'), updater_id, info_id
+                )
                 
             elif type == 'job':
                 sql = f"""
                 UPDATE {table_name} 
                 SET file_name = %s, issuing_department = %s, document_type = %s, publish_date = %s, 
-                    effective_start_date = %s, effective_end_date = %s,
+                    effective_start_date = %s, effective_end_date = %s, note = %s,
                     updated_by = %s, updated_time = NOW() 
                 WHERE id = %s
                 """
                 params = (
                     data.get('title'), data.get('issuing_authority'), data.get('document_type'), self._to_date(data.get('release_date')), 
-                    self._to_date(data.get('effective_start_date')), self._to_date(data.get('effective_end_date')),
+                    self._to_date(data.get('effective_start_date')), self._to_date(data.get('effective_end_date')), data.get('note'),
                     updater_id, info_id
                 )
             else:
@@ -760,9 +962,15 @@ class SampleService:
             if not table_name:
                 return False, "无效的类型"
             
-            # 1. 删除子表记录 (触发器会自动删除主表记录)
-            cursor.execute(f"DELETE FROM {table_name} WHERE id = %s", (info_id,))
+            # 1. 删除主表记录 (由于设置了 ON DELETE CASCADE,子表记录会自动删除)
+            cursor.execute("DELETE FROM t_samp_document_main WHERE id = %s", (info_id,))
             
+            # 同步删除任务管理中心的数据
+            try:
+                await task_service.delete_task(info_id)
+            except Exception as task_err:
+                logger.error(f"同步删除任务中心数据失败 (ID: {info_id}): {task_err}")
+
             conn.commit()
             return True, "删除成功"
         except Exception as e:

+ 37 - 12
src/app/services/snippet_service.py

@@ -7,6 +7,7 @@ import json
 import random
 import csv
 import io
+import time
 from datetime import datetime
 
 from app.services.milvus_service import milvus_service
@@ -130,16 +131,28 @@ class SnippetService:
     def create(self, payload: Any) -> Dict:
         """创建知识片段"""
         # 使用统一算法生成向量
-        fake_vector = text_to_vector_algo(payload.content, dim=768)
+        dim = milvus_service.DENSE_DIM
+        fake_vector = text_to_vector_algo(payload.content, dim=dim)
         
         # 基础数据
+        now = int(time.time() * 1000)
         item = {
-            "vector": fake_vector,
+            "dense": fake_vector,
             "text": payload.content,
-            "source": payload.doc_name,
-            "doc_id": "manual_add",
-            "file_name": payload.doc_name, 
-            "title": payload.doc_name
+            "document_id": "manual_add",
+            "tag_list": "",
+            "permission": {},
+            "metadata": {
+                "doc_name": payload.doc_name,
+                "file_name": payload.doc_name, 
+                "title": payload.doc_name
+            },
+            "index": 0,
+            "is_deleted": 0,
+            "created_by": "system",
+            "created_time": now,
+            "updated_by": "system",
+            "updated_time": now
         }
         
         # 合并自定义字段
@@ -174,15 +187,27 @@ class SnippetService:
         
         # 2. 插入新数据
         # 使用统一算法生成向量
-        fake_vector = text_to_vector_algo(payload.content, dim=768)
+        dim = milvus_service.DENSE_DIM
+        fake_vector = text_to_vector_algo(payload.content, dim=dim)
         
+        now = int(time.time() * 1000)
         item = {
-            "vector": fake_vector,
+            "dense": fake_vector,
             "text": payload.content,
-            "source": payload.doc_name or "已更新",
-            "doc_id": "updated",
-            "file_name": payload.doc_name,
-            "title": payload.doc_name
+            "document_id": "updated",
+            "tag_list": "",
+            "permission": {},
+            "metadata": {
+                "doc_name": payload.doc_name or "已更新",
+                "file_name": payload.doc_name,
+                "title": payload.doc_name
+            },
+            "index": 0,
+            "is_deleted": 0,
+            "created_by": "system",
+            "created_time": now,
+            "updated_by": "system",
+            "updated_time": now
         }
         
         # 合并自定义字段

+ 105 - 0
src/app/services/task_service.py

@@ -0,0 +1,105 @@
+
+import logging
+from typing import List, Dict, Any, Tuple
+from app.base.async_mysql_connection import get_db_connection
+
+logger = logging.getLogger(__name__)
+
+class TaskService:
+    """任务管理服务类"""
+    
+    async def get_tasks(self, task_type: str) -> List[Dict[str, Any]]:
+        """获取任务列表
+        
+        Args:
+            task_type: 任务类型, 'data' 或 'image'
+        """
+        conn = get_db_connection()
+        if not conn:
+            return []
+        
+        cursor = conn.cursor()
+        try:
+            if task_type == 'data':
+                # 类型为数据的,从 t_samp_document_main 拿名称
+                sql = """
+                    SELECT 
+                        t.id, 
+                        t.task_id, 
+                        t.type,
+                        d.title as name
+                    FROM t_task_management t
+                    JOIN t_samp_document_main d ON t.id COLLATE utf8mb4_unicode_ci = d.id COLLATE utf8mb4_unicode_ci
+                    WHERE t.type = 'data'
+                    ORDER BY d.created_time DESC
+                """
+            elif task_type == 'image':
+                # 类型为图片的,从 t_image_info 拿名称
+                sql = """
+                    SELECT 
+                        t.id, 
+                        t.task_id, 
+                        t.type,
+                        i.image_name as name
+                    FROM t_task_management t
+                    JOIN t_image_info i ON t.id COLLATE utf8mb4_unicode_ci = i.id COLLATE utf8mb4_unicode_ci
+                    WHERE t.type = 'image'
+                    ORDER BY i.created_time DESC
+                """
+            else:
+                return []
+                
+            cursor.execute(sql)
+            return cursor.fetchall()
+        except Exception as e:
+            logger.exception(f"获取任务列表失败 ({task_type}): {e}")
+            return []
+        finally:
+            cursor.close()
+            conn.close()
+
+    async def add_task(self, id: str, task_type: str, task_id: str = None) -> Tuple[bool, str]:
+        """添加任务记录"""
+        conn = get_db_connection()
+        if not conn:
+            return False, "数据库连接失败"
+        
+        cursor = conn.cursor()
+        try:
+            sql = """
+                INSERT INTO t_task_management (id, task_id, type)
+                VALUES (%s, %s, %s)
+                ON DUPLICATE KEY UPDATE task_id = VALUES(task_id)
+            """
+            cursor.execute(sql, (id, task_id, task_type))
+            conn.commit()
+            return True, "添加成功"
+        except Exception as e:
+            logger.exception(f"添加任务记录失败: {e}")
+            conn.rollback()
+            return False, f"添加失败: {str(e)}"
+        finally:
+            cursor.close()
+            conn.close()
+
+    async def delete_task(self, id: str) -> Tuple[bool, str]:
+        """删除任务记录"""
+        conn = get_db_connection()
+        if not conn:
+            return False, "数据库连接失败"
+        
+        cursor = conn.cursor()
+        try:
+            sql = "DELETE FROM t_task_management WHERE id = %s"
+            cursor.execute(sql, (id,))
+            conn.commit()
+            return True, "删除成功"
+        except Exception as e:
+            logger.exception(f"删除任务记录失败: {e}")
+            conn.rollback()
+            return False, f"删除失败: {str(e)}"
+        finally:
+            cursor.close()
+            conn.close()
+
+task_service = TaskService()

+ 108 - 12
src/views/sample_view.py

@@ -17,6 +17,8 @@ from app.services.jwt_token import verify_token
 from app.schemas.base import ApiResponse
 from app.base import get_mineru_manager
 
+from app.services.task_service import task_service
+
 # 获取logger
 logger = logging.getLogger(__name__)
 
@@ -26,6 +28,19 @@ security = HTTPBearer()
 security_optional = HTTPBearer(auto_error=False)
 
 
+@router.get("/tasks")
+async def get_tasks(type: str, credentials: HTTPAuthorizationCredentials = Depends(security)):
+    """获取任务列表"""
+    try:
+        payload = verify_token(credentials.credentials)
+        if not payload:
+            return ApiResponse(code=401, message="无效的访问令牌", timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
+        
+        tasks = await task_service.get_tasks(type)
+        return ApiResponse(code=0, message="成功", data=tasks, timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
+    except Exception as e:
+        logger.exception("获取任务列表失败")
+        return ApiResponse(code=500, message=str(e), timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
 
 
 # --- 文档管理中心 API ---
@@ -81,7 +96,36 @@ async def proxy_view(url: str, token: Optional[str] = None, credentials: Optiona
             content_type = response.headers.get("content-type", "").lower()
             
             # 如果是 PDF 或其他二进制文件
-            if "application/pdf" in content_type or any(ext in url.lower() for ext in [".pdf", ".png", ".jpg", ".jpeg", ".gif"]):
+            binary_extensions = {
+                ".pdf": "application/pdf",
+                ".png": "image/png",
+                ".jpg": "image/jpeg",
+                ".jpeg": "image/jpeg",
+                ".gif": "image/gif",
+                ".doc": "application/msword",
+                ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
+                ".xls": "application/vnd.ms-excel",
+                ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+                ".ppt": "application/vnd.ms-powerpoint",
+                ".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
+                ".zip": "application/zip",
+                ".rar": "application/x-rar-compressed",
+                ".7z": "application/x-7z-compressed"
+            }
+            
+            is_binary = "application/pdf" in content_type or \
+                        "application/vnd." in content_type or \
+                        "application/msword" in content_type or \
+                        "application/octet-stream" in content_type or \
+                        any(ext in url.lower() for ext in binary_extensions.keys())
+            
+            if is_binary:
+                # 尝试根据扩展名修正 media_type
+                for ext, m_type in binary_extensions.items():
+                    if ext in url.lower():
+                        content_type = m_type
+                        break
+                
                 return Response(
                     content=response.content,
                     media_type=content_type,
@@ -90,7 +134,20 @@ async def proxy_view(url: str, token: Optional[str] = None, credentials: Optiona
             
             # 默认处理为 HTML
             try:
-                content = response.text
+                # 尝试多种编码解码 content
+                data = response.content
+                content = None
+                encodings = ['utf-8', 'gbk', 'utf-8-sig', 'gb18030']
+                
+                for enc in encodings:
+                    try:
+                        content = data.decode(enc)
+                        break
+                    except UnicodeDecodeError:
+                        continue
+                
+                if content is None:
+                    content = data.decode('utf-8', errors='ignore')
                 
                 # 简单的注入一些基础样式,确保内容在 iframe 中显示良好
                 base_style = """
@@ -278,7 +335,7 @@ async def add_document(doc: DocumentAdd, credentials: HTTPAuthorizationCredentia
         # 将 DocumentAdd 对象转换为字典
         doc_data = {
             'title': doc.title,
-            'content': doc.content,
+            'note': doc.note,
             'table_type': doc.table_type,
             'primary_category_id': doc.primary_category_id,
             'secondary_category_id': doc.secondary_category_id,
@@ -293,7 +350,14 @@ async def add_document(doc: DocumentAdd, credentials: HTTPAuthorizationCredentia
             'professional_field': doc.professional_field,
             'validity': doc.validity,
             'project_name': doc.project_name,
-            'project_section': doc.project_section
+            'project_section': doc.project_section,
+            'compilation_basis': doc.compilation_basis,
+            'plan_summary': doc.plan_summary,
+            'plan_category': doc.plan_category,
+            'level_1_classification': doc.level_1_classification,
+            'level_2_classification': doc.level_2_classification,
+            'level_3_classification': doc.level_3_classification,
+            'level_4_classification': doc.level_4_classification
         }
         
         success, message, doc_id = await sample_service.add_document(doc_data, user_id)
@@ -334,6 +398,10 @@ async def get_document_list(
     whether_to_enter: Optional[int] = None,
     keyword: Optional[str] = None,
     table_type: Optional[str] = None,
+    plan_category: Optional[str] = None,
+    level_2_classification: Optional[str] = None,
+    level_3_classification: Optional[str] = None,
+    level_4_classification: Optional[str] = None,
     page: int = 1, 
     size: int = 50,
     credentials: HTTPAuthorizationCredentials = Depends(security)
@@ -349,6 +417,10 @@ async def get_document_list(
             whether_to_enter=whether_to_enter,
             keyword=keyword,
             table_type=table_type,
+            plan_category=plan_category,
+            level_2_classification=level_2_classification,
+            level_3_classification=level_3_classification,
+            level_4_classification=level_4_classification,
             page=page,
             size=size
         )
@@ -378,18 +450,20 @@ async def edit_document(doc: DocumentAdd, credentials: HTTPAuthorizationCredenti
         if not payload:
             return ApiResponse(code=401, message="无效的访问令牌", timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
             
-        if not doc.id or not doc.source_id:
+        if not doc.id:
             return ApiResponse(code=400, message="缺少ID参数", timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
         
         # 调用 service 层
         sample_service = SampleService()
         
+        # 获取更新人ID
+        updater_id = payload.get("username", "admin")
+        
         # 将 DocumentAdd 对象转换为字典
         doc_data = {
             'id': doc.id,
-            'source_id': doc.source_id,
             'title': doc.title,
-            'content': doc.content,
+            'note': doc.note,
             'table_type': doc.table_type,
             'primary_category_id': doc.primary_category_id,
             'secondary_category_id': doc.secondary_category_id,
@@ -404,10 +478,17 @@ async def edit_document(doc: DocumentAdd, credentials: HTTPAuthorizationCredenti
             'professional_field': doc.professional_field if hasattr(doc, 'professional_field') else None,
             'validity': doc.validity if hasattr(doc, 'validity') else None,
             'project_name': doc.project_name if hasattr(doc, 'project_name') else None,
-            'project_section': doc.project_section if hasattr(doc, 'project_section') else None
+            'project_section': doc.project_section if hasattr(doc, 'project_section') else None,
+            'compilation_basis': doc.compilation_basis if hasattr(doc, 'compilation_basis') else None,
+            'plan_summary': doc.plan_summary if hasattr(doc, 'plan_summary') else None,
+            'plan_category': doc.plan_category if hasattr(doc, 'plan_category') else None,
+            'level_1_classification': doc.level_1_classification if hasattr(doc, 'level_1_classification') else None,
+            'level_2_classification': doc.level_2_classification if hasattr(doc, 'level_2_classification') else None,
+            'level_3_classification': doc.level_3_classification if hasattr(doc, 'level_3_classification') else None,
+            'level_4_classification': doc.level_4_classification if hasattr(doc, 'level_4_classification') else None
         }
         
-        success, message = await sample_service.edit_document(doc_data)
+        success, message = await sample_service.edit_document(doc_data, updater_id)
         
         if success:
             return ApiResponse(code=0, message=message, timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
@@ -456,6 +537,11 @@ async def get_basic_info_list(
     issuing_authority: Optional[str] = None,
     release_date_start: Optional[str] = None,
     release_date_end: Optional[str] = None,
+    plan_category: Optional[str] = None,
+    level_1_classification: Optional[str] = None,
+    level_2_classification: Optional[str] = None,
+    level_3_classification: Optional[str] = None,
+    level_4_classification: Optional[str] = None,
     credentials: HTTPAuthorizationCredentials = Depends(security)
 ):
     """获取基本信息列表 (支持多条件检索)"""
@@ -484,6 +570,16 @@ async def get_basic_info_list(
             filters['release_date_start'] = release_date_start
         if release_date_end:
             filters['release_date_end'] = release_date_end
+        if plan_category:
+            filters['plan_category'] = plan_category
+        if level_1_classification:
+            filters['level_1_classification'] = level_1_classification
+        if level_2_classification:
+            filters['level_2_classification'] = level_2_classification
+        if level_3_classification:
+            filters['level_3_classification'] = level_3_classification
+        if level_4_classification:
+            filters['level_4_classification'] = level_4_classification
         
         items, total = await sample_service.get_basic_info_list(
             type=type,
@@ -514,10 +610,10 @@ async def add_basic_info(type: str, data: dict, credentials: HTTPAuthorizationCr
         
         user_id = payload.get("username", "admin")
         sample_service = SampleService()
-        success, message = await sample_service.add_basic_info(type, data, user_id)
+        success, message, doc_id = await sample_service.add_basic_info(type, data, user_id)
         
         if success:
-            return ApiResponse(code=0, message=message, timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
+            return ApiResponse(code=0, message=message, data={"id": doc_id}, timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
         else:
             return ApiResponse(code=500, message=message, timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
     except Exception as e:
@@ -533,7 +629,7 @@ async def edit_basic_info(type: str, id: str, data: dict, credentials: HTTPAutho
             return ApiResponse(code=401, message="无效的访问令牌", timestamp=datetime.now(timezone.utc).isoformat()).model_dump()
         
         sample_service = SampleService()
-        success, message = await sample_service.edit_basic_info(type, id, data)
+        success, message = await sample_service.edit_basic_info(type, id, data, payload.get("username", "admin"))
         
         if success:
             return ApiResponse(code=0, message=message, timestamp=datetime.now(timezone.utc).isoformat()).model_dump()

Some files were not shown because too many files changed in this diff