2.20 Switch the main workflow to multi-threading (test)

zy123 2025-02-20 15:12:08 +08:00
parent e6b46b6949
commit 3e69e9fa9c
7 changed files with 77 additions and 54 deletions

View File

@@ -1,11 +1,10 @@
# -*- encoding:utf-8 -*-
import json
import logging
import re
from flask_app.general.json_utils import clean_json_string
from flask_app.general.llm.多线程提问 import multi_threading
from flask_app.general.llm.通义千问long import upload_file, qianwen_long_stream
from flask_app.old_version.判断是否分包等_old import read_questions_from_judge
from flask_app.general.判断是否分包 import read_questions_from_judge
def get_deviation_requirements(invalid_path):
file_id=upload_file(invalid_path)

View File

@@ -2,7 +2,7 @@ import json
from flask_app.general.json_utils import clean_json_string
from flask_app.工程标.投标人须知正文提取指定内容工程标 import extract_from_notice
from flask_app.old_version.判断是否分包等_old import judge_whether_main, read_questions_from_judge
from flask_app.general.判断是否分包 import judge_whether_main, read_questions_from_judge
from flask_app.general.llm.多线程提问 import read_questions_from_file, multi_threading
from flask_app.general.llm.通义千问long import upload_file
from flask_app.general.通用功能函数 import judge_consortium_bidding

View File

@@ -31,6 +31,8 @@ def preprocess_file_main(output_folder, file_path, file_type,logger):
args=(output_folder, file_path, file_type, logger)
)
return result
def preprocess_files(output_folder, file_path, file_type, logger):
logger.info("starting 文件预处理...")
start_time = time.time()
@@ -118,8 +120,10 @@ def preprocess_files(output_folder, file_path, file_type,logger):
'clause_path': clause_path
}
# 基本信息
def fetch_project_basic_info(invalid_deleted_docx, merged_baseinfo_path, merged_baseinfo_path_more,clause_path, logger):
def fetch_project_basic_info(invalid_deleted_docx, merged_baseinfo_path, merged_baseinfo_path_more, clause_path,
logger):
logger.info("starting 基础信息...")
start_time = time.time()
try:
@@ -127,7 +131,8 @@ def fetch_project_basic_info(invalid_deleted_docx, merged_baseinfo_path, merged_
merged_baseinfo_path = invalid_deleted_docx
if not merged_baseinfo_path_more:
merged_baseinfo_path_more = invalid_deleted_docx
basic_res = combine_basic_info(merged_baseinfo_path, merged_baseinfo_path_more, clause_path,invalid_deleted_docx)
basic_res = combine_basic_info(merged_baseinfo_path, merged_baseinfo_path_more, clause_path,
invalid_deleted_docx)
result = basic_res
end_time = time.time()
logger.info(f"基础信息 done耗时{end_time - start_time:.2f}")
@@ -138,7 +143,8 @@ def fetch_project_basic_info(invalid_deleted_docx, merged_baseinfo_path, merged_
return result
def fetch_qualification_review(evaluation_method, qualification, output_folder, tobidders_notice_table, clause_path, invalid_deleted_docx, merged_baseinfo_path, notice_path, logger):
def fetch_qualification_review(evaluation_method, qualification, output_folder, tobidders_notice_table, clause_path,
invalid_deleted_docx, merged_baseinfo_path, notice_path, logger):
logger.info("starting 资格审查...")
start_time = time.time()
try:
@@ -149,7 +155,8 @@ def fetch_qualification_review(evaluation_method, qualification, output_folder,
if not merged_baseinfo_path:
merged_baseinfo_path = invalid_deleted_docx
review_standards_res = combine_review_standards(
evaluation_method, qualification, output_folder, tobidders_notice_table, clause_path, invalid_deleted_docx, merged_baseinfo_path, notice_path)
evaluation_method, qualification, output_folder, tobidders_notice_table, clause_path, invalid_deleted_docx,
merged_baseinfo_path, notice_path)
result = review_standards_res
end_time = time.time()
logger.info(f"资格审查 done耗时{end_time - start_time:.2f}")
@@ -201,7 +208,8 @@ def fetch_bidding_documents_requirements(invalid_deleted_docx, merged_baseinfo_p
if not merged_baseinfo_path_more:
merged_baseinfo_path_more = invalid_deleted_docx
selection = 1
fetch_bidding_documents_requirements_json = extract_from_notice(merged_baseinfo_path_more, clause_path, selection)
fetch_bidding_documents_requirements_json = extract_from_notice(merged_baseinfo_path_more, clause_path,
selection)
result = {"投标文件要求": fetch_bidding_documents_requirements_json}
end_time = time.time()
logger.info(f"投标文件要求 done耗时{end_time - start_time:.2f}")
@@ -211,6 +219,7 @@ def fetch_bidding_documents_requirements(invalid_deleted_docx, merged_baseinfo_p
result = {"投标文件要求": {}}
return result
# 开评定标流程
def fetch_bid_opening(invalid_deleted_docx, merged_baseinfo_path_more, clause_path, logger):
logger.info("starting 开评定标流程...")
@@ -229,6 +238,7 @@ def fetch_bid_opening(invalid_deleted_docx, merged_baseinfo_path_more, clause_pa
result = {"开评定标流程": {}}
return result
def engineering_bid_main(output_folder, file_path, file_type, unique_id):
logger = get_global_logger(unique_id)
# 预处理文件,获取处理后的数据
@@ -239,20 +249,31 @@ def engineering_bid_main(output_folder, file_path, file_type, unique_id):
}
yield json.dumps(error_response, ensure_ascii=False)
return # 停止进一步处理
with ProcessPoolExecutor() as executor:
with ThreadPoolExecutor() as executor:
# 立即启动不依赖 knowledge_name 和 index 的任务
futures = {
'base_info': executor.submit(fetch_project_basic_info,processed_data['invalid_deleted_docx'] ,processed_data['merged_baseinfo_path'],processed_data['merged_baseinfo_path_more'],
'base_info': executor.submit(fetch_project_basic_info, processed_data['invalid_deleted_docx'],
processed_data['merged_baseinfo_path'],
processed_data['merged_baseinfo_path_more'],
processed_data['clause_path'], logger),
'qualification_review': executor.submit(fetch_qualification_review, processed_data['evaluation_method'],
processed_data['qualification'], output_folder,
processed_data['tobidders_notice_table'],
processed_data['clause_path'], processed_data['invalid_deleted_docx'],
processed_data['merged_baseinfo_path'],processed_data['notice_path'],logger),
'evaluation_standards': executor.submit(fetch_evaluation_standards, processed_data['invalid_deleted_docx'],processed_data['evaluation_method'],logger),
'invalid_requirements': executor.submit(fetch_invalid_requirements, processed_data['invalid_added_docx'],output_folder,logger),
'bidding_documents_requirements': executor.submit(fetch_bidding_documents_requirements,processed_data['invalid_deleted_docx'], processed_data['merged_baseinfo_path_more'],processed_data['clause_path'],logger),
'opening_bid': executor.submit(fetch_bid_opening,processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path_more'], processed_data['clause_path'],logger)
processed_data['clause_path'],
processed_data['invalid_deleted_docx'],
processed_data['merged_baseinfo_path'],
processed_data['notice_path'], logger),
'evaluation_standards': executor.submit(fetch_evaluation_standards, processed_data['invalid_deleted_docx'],
processed_data['evaluation_method'], logger),
'invalid_requirements': executor.submit(fetch_invalid_requirements, processed_data['invalid_added_docx'],
output_folder, logger),
'bidding_documents_requirements': executor.submit(fetch_bidding_documents_requirements,
processed_data['invalid_deleted_docx'],
processed_data['merged_baseinfo_path_more'],
processed_data['clause_path'], logger),
'opening_bid': executor.submit(fetch_bid_opening, processed_data['invalid_deleted_docx'],
processed_data['merged_baseinfo_path_more'], processed_data['clause_path'],
logger)
}
# 提前处理这些不依赖的任务,按完成顺序返回
@@ -267,8 +288,10 @@ def engineering_bid_main(output_folder, file_path, file_type, unique_id):
commercial_standards = result["commercial_standards"]
# 分别返回技术标和商务标
yield json.dumps({'technical_standards': transform_json_values(technical_standards)}, ensure_ascii=False)
yield json.dumps({'commercial_standards': transform_json_values(commercial_standards)}, ensure_ascii=False)
yield json.dumps({'technical_standards': transform_json_values(technical_standards)},
ensure_ascii=False)
yield json.dumps({'commercial_standards': transform_json_values(commercial_standards)},
ensure_ascii=False)
else:
# 处理其他任务的结果
@@ -285,6 +308,7 @@ def engineering_bid_main(output_folder, file_path, file_type, unique_id):
yield json.dumps(default_evaluation, ensure_ascii=False)
# yield json.dumps({'error': f'Error processing {key}: {str(exc)}'}, ensure_ascii=False)
# TODO:基本信息,判断是否这里,打勾逻辑取消了。
if __name__ == "__main__":
start_time = time.time()
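
The heart of this change is visible in engineering_bid_main: the fan-out of the fetch_* tasks moves from ProcessPoolExecutor to ThreadPoolExecutor, and each section is still streamed back as a JSON chunk as soon as its future completes. Below is a minimal, self-contained sketch of that submit / as_completed / yield pattern; fetch_demo_section and the task names are placeholders, not the project's real signatures.

import json
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

def fetch_demo_section(name, delay):
    # Placeholder for the project's fetch_* helpers (e.g. fetch_project_basic_info).
    time.sleep(delay)  # simulate an I/O-bound file upload / LLM call
    return {name: f"done after {delay:.1f}s"}

def stream_sections():
    # Submit every independent task up front, then yield each result as JSON
    # the moment its future finishes, mirroring the executor loop in this hunk.
    tasks = {"base_info": 0.3, "opening_bid": 0.1, "evaluation_standards": 0.2}
    with ThreadPoolExecutor() as executor:
        futures = {executor.submit(fetch_demo_section, key, delay): key
                   for key, delay in tasks.items()}
        for future in as_completed(futures):
            key = futures[future]
            try:
                yield json.dumps(future.result(), ensure_ascii=False)
            except Exception as exc:
                # still emit a well-formed (empty) payload for the failed section
                yield json.dumps({key: {}, "error": str(exc)}, ensure_ascii=False)

if __name__ == "__main__":
    for chunk in stream_sections():
        print(chunk)

Threads fit this workload because the tasks are dominated by file uploads and LLM calls (I/O wait), so the GIL is rarely the bottleneck and the futures can share processed_data without pickling it between processes.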

View File

@@ -15,7 +15,7 @@ from flask_app.general.投标人须知正文条款提取成json文件 import con
from flask_app.general.无效标和废标公共代码 import combine_find_invalid
from flask_app.货物标.资格审查main import combine_qualification_review
from flask_app.general.商务技术评分提取 import combine_evaluation_standards
from concurrent.futures import ThreadPoolExecutor
def preprocess_file_main(output_folder, file_path, file_type,logger):
# 这里是你原本处理请求的地方
with multiprocessing.Pool(processes=1) as pool:
@@ -241,11 +241,11 @@ def goods_bid_main(output_folder, file_path, file_type, unique_id):
}
yield json.dumps(error_response, ensure_ascii=False)
return # 停止进一步处理
with ProcessPoolExecutor() as executor: #开启子进程,能保证运行结束后回收资源消耗
with ThreadPoolExecutor() as executor: #开启子进程,能保证运行结束后回收资源消耗
# 立即启动不依赖 knowledge_name 和 index 的任务
futures = {
'evaluation_standards': executor.submit(fetch_evaluation_standards,processed_data['invalid_deleted_docx'], #技术评分 商务评分
processed_data['evaluation_method_path'],logger),
# 'evaluation_standards': executor.submit(fetch_evaluation_standards,processed_data['invalid_deleted_docx'], #技术评分 商务评分
# processed_data['evaluation_method_path'],logger),
'invalid_requirements': executor.submit(fetch_invalid_requirements, processed_data['invalid_added_docx'], #无效标与废标项
output_folder,logger),
@@ -256,12 +256,12 @@ def goods_bid_main(output_folder, file_path, file_type, unique_id):
'opening_bid': executor.submit(fetch_bid_opening, processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'],
processed_data['clause_path'],logger), #开评定标流程
'base_info': executor.submit(fetch_project_basic_info, processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'], #基础信息
processed_data['procurement_path'],processed_data['clause_path'],logger),
'qualification_review': executor.submit(fetch_qualification_review, processed_data['invalid_deleted_docx'], #资格审查
processed_data['qualification_path'],
processed_data['notice_path'],logger),
# 'base_info': executor.submit(fetch_project_basic_info, processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'], #基础信息
# processed_data['procurement_path'],processed_data['clause_path'],logger),
#
# 'qualification_review': executor.submit(fetch_qualification_review, processed_data['invalid_deleted_docx'], #资格审查
# processed_data['qualification_path'],
# processed_data['notice_path'],logger),
}
collected_good_list = []
# 提前处理这些不依赖的任务,按完成顺序返回
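
In the goods_bid_main path the same swap happens (ProcessPoolExecutor to ThreadPoolExecutor), while preprocess_file_main keeps running the heavy preprocessing inside multiprocessing.Pool(processes=1). A hedged sketch of that split, with heavy_preprocess and analyze_section as stand-ins for the real helpers:

import multiprocessing
from concurrent.futures import ThreadPoolExecutor

def heavy_preprocess(path):
    # Stand-in for preprocess_files: memory-heavy docx splitting and merging.
    return {"source": path, "sections": ["notice", "clauses", "evaluation"]}

def analyze_section(section):
    # Stand-in for an I/O-bound fetch_* task (file upload, LLM question).
    return f"analysis of {section}"

def run(path):
    # Heavy step in a throwaway child process: its memory is released when the
    # pool exits, which is the behaviour preprocess_file_main keeps.
    with multiprocessing.Pool(processes=1) as pool:
        processed = pool.apply(heavy_preprocess, (path,))
    # The lighter, I/O-bound tasks fan out to threads, as this commit now does.
    with ThreadPoolExecutor() as executor:
        return list(executor.map(analyze_section, processed["sections"]))

if __name__ == "__main__":
    print(run("bid.docx"))

Keeping the single-process pool for preprocessing preserves the resource-reclamation behaviour described by the original inline comment, while the thread pool for the downstream tasks avoids pickling processed_data across process boundaries.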

View File

@@ -5,7 +5,7 @@ import concurrent.futures
from flask_app.general.json_utils import clean_json_string, add_outer_key
from flask_app.general.通用功能函数 import process_judge_questions, aggregate_basic_info
from flask_app.general.投标人须知正文提取指定内容 import extract_from_notice
from flask_app.old_version.判断是否分包等_old import merge_json_to_list
from flask_app.general.判断是否分包 import merge_json_to_list
from flask_app.general.llm.多线程提问 import read_questions_from_file, multi_threading
from flask_app.general.llm.通义千问long import upload_file

View File

@@ -7,7 +7,7 @@ from flask_app.general.json_utils import clean_json_string, add_outer_key
from flask_app.general.通用功能函数 import process_judge_questions, aggregate_basic_info
from flask_app.general.llm.多线程提问 import read_questions_from_file, multi_threading
from flask_app.general.llm.通义千问long import upload_file
from flask_app.old_version.判断是否分包等_old import merge_json_to_list
from flask_app.general.判断是否分包 import merge_json_to_list
from flask_app.general.投标人须知正文提取指定内容 import extract_from_notice
from flask_app.货物标.提取采购需求main import fetch_procurement_reqs
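
Several files in this commit also switch their imports from flask_app.old_version.判断是否分包等_old to flask_app.general.判断是否分包. A small, hypothetical sanity check (assuming flask_app is importable on the current path) that the relocated module still exposes every name these callers import:

# Hypothetical check; the expected names are taken from the imports in this diff.
import importlib

EXPECTED = ("read_questions_from_judge", "judge_whether_main", "merge_json_to_list")

module = importlib.import_module("flask_app.general.判断是否分包")
missing = [name for name in EXPECTED if not hasattr(module, name)]
if missing:
    raise ImportError(f"flask_app.general.判断是否分包 is missing: {missing}")
print("all relocated helpers are importable")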