2.20 Wrap the main processing flow in a subprocess, use multithreading internally

zy123 2025-02-20 16:34:06 +08:00
parent 638f21ac3a
commit 2f74f93f7c
3 changed files with 18 additions and 18 deletions
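
The commit title describes the new execution model: each top-level parsing entry point now runs inside a single-worker subprocess (multiprocessing.Pool(processes=1) plus pool.apply), while the work inside that entry point still fans out across threads with ThreadPoolExecutor. A minimal, self-contained sketch of that shape, using hypothetical names (bid_main, bid_main_process, fetch_part) in place of the project's real functions:

import multiprocessing
from concurrent.futures import ThreadPoolExecutor, as_completed


def fetch_part(name):
    # Stand-in for one extraction task (e.g. evaluation standards, base info).
    return f"{name}: done"


def bid_main(output_folder, file_path, file_type, unique_id):
    # The "main flow": internally multithreaded, returns plain picklable data.
    results = []
    with ThreadPoolExecutor() as executor:
        futures = [executor.submit(fetch_part, n) for n in ("base_info", "evaluation_standards")]
        for future in as_completed(futures):
            results.append(future.result())
    return results


def bid_main_process(output_folder, file_path, file_type, unique_id):
    # Subprocess wrapper: pool.apply blocks until the child finishes; closing the
    # pool ends the child process and releases whatever memory the parsing used.
    with multiprocessing.Pool(processes=1) as pool:
        return pool.apply(bid_main, args=(output_folder, file_path, file_type, unique_id))


if __name__ == "__main__":
    print(bid_main_process("out", "tender.docx", 1, "demo-id"))

Presumably the point of the wrapper is isolation: memory and native resources allocated by the heavy document parsing are released when the short-lived child exits, instead of accumulating inside the long-running Flask worker.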

View File

@@ -4,8 +4,8 @@ import json
 import os
 import time
 from flask_app.general.format_change import download_file
-from flask_app.routes.工程标解析main import engineering_bid_main
-from flask_app.routes.货物标解析main import goods_bid_main
+from flask_app.routes.工程标解析main import engineering_bid_main_process
+from flask_app.routes.货物标解析main import goods_bid_main_process
 from flask_app.general.post_processing import outer_post_processing
 from flask_app.routes.utils import generate_deviation_response, validate_and_setup_logger, create_response, sse_format, \
     log_error_unique_id
@@ -77,10 +77,10 @@ def process_and_stream(file_url, zb_type):
     good_list = None
     processing_functions = {
-        1: engineering_bid_main, #工程标解析
-        2: goods_bid_main #货物标解析/服务标解析
+        1: engineering_bid_main_process, #工程标解析
+        2: goods_bid_main_process #货物标解析/服务标解析
     }
-    processing_func = processing_functions.get(zb_type, goods_bid_main)
+    processing_func = processing_functions.get(zb_type, goods_bid_main_process)
     for data in processing_func(output_folder, downloaded_filepath, file_type, unique_id): #逐一接收货物标 工程标解析内容,为前端网页展示服务
         if not data.strip():
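
For the route layer the visible change is just which callables the dispatch table holds; selection and streaming stay the same. A hypothetical, trimmed-down version of that dispatch (the real process_and_stream also imports sse_format and outer_post_processing for the full streaming behaviour):

from flask_app.routes.工程标解析main import engineering_bid_main_process
from flask_app.routes.货物标解析main import goods_bid_main_process


def stream_parse(zb_type, output_folder, downloaded_filepath, file_type, unique_id):
    processing_functions = {
        1: engineering_bid_main_process,   # engineering bids
        2: goods_bid_main_process,         # goods / service bids
    }
    # Any unrecognised zb_type falls back to the goods-bid pipeline.
    processing_func = processing_functions.get(zb_type, goods_bid_main_process)
    for data in processing_func(output_folder, downloaded_filepath, file_type, unique_id):
        if not data.strip():
            continue   # skip empty chunks
        yield data     # handed to the SSE response in the real route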

View File flask_app/routes/工程标解析main.py

@@ -23,7 +23,7 @@ from flask_app.general.商务技术评分提取 import combine_evaluation_standards
 from flask_app.general.format_change import pdf2docx, docx2pdf
-def preprocess_file_main(output_folder, file_path, file_type, logger):
+def engineering_bid_main_process(output_folder, file_path, file_type, logger):
     # 这里是你原本处理请求的地方
     with multiprocessing.Pool(processes=1) as pool:
         result = pool.apply(
@@ -242,7 +242,7 @@ def fetch_bid_opening(invalid_deleted_docx, merged_baseinfo_path_more, clause_pa
 def engineering_bid_main(output_folder, file_path, file_type, unique_id):
     logger = get_global_logger(unique_id)
     # 预处理文件,获取处理后的数据
-    processed_data = preprocess_file_main(output_folder, file_path, file_type, logger)
+    processed_data = preprocess_files(output_folder, file_path, file_type, logger)
     if not processed_data:
         error_response = {
             'error': '文件预处理失败。请检查文件类型并重试。'
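
These two hunks move the subprocess boundary up one level in the engineering-bid pipeline. Before the commit, engineering_bid_main called preprocess_file_main, which ran only preprocess_files inside the single-worker pool; after it, the wrapper is renamed engineering_bid_main_process and engineering_bid_main calls preprocess_files directly. The pool.apply target inside the renamed wrapper falls outside the shown hunk, but the goods-bid file below makes the corresponding change explicit, so the child process presumably now covers the whole main flow rather than just preprocessing.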

View File flask_app/routes/货物标解析main.py

@@ -16,12 +16,12 @@ from flask_app.general.无效标和废标公共代码 import combine_find_invali
 from flask_app.货物标.资格审查main import combine_qualification_review
 from flask_app.general.商务技术评分提取 import combine_evaluation_standards
 from concurrent.futures import ThreadPoolExecutor
-def preprocess_file_main(output_folder, file_path, file_type,logger):
+def goods_bid_main_process(output_folder, file_path, file_type,unique_id):
     # 这里是你原本处理请求的地方
     with multiprocessing.Pool(processes=1) as pool:
         result = pool.apply(
-            preprocess_files, # 你的实际执行函数
-            args=(output_folder, file_path, file_type, logger)
+            goods_bid_main, # 你的实际执行函数
+            args=(output_folder, file_path, file_type, unique_id)
         )
     return result
 def preprocess_files(output_folder, file_path, file_type,logger):
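
Note that the wrapper's last parameter changes from logger to unique_id: the function now handed to pool.apply is goods_bid_main, which builds its own logger from unique_id (visible in the next hunk), so only a plain string needs to be passed to the child. More generally, pool.apply blocks until the child finishes and pickles both the arguments and the return value, so whatever goods_bid_main hands back must be plain picklable data (lists, dicts, strings) rather than, say, a live generator.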
@@ -234,7 +234,7 @@ def post_process_baseinfo(base_info,logger):
 def goods_bid_main(output_folder, file_path, file_type, unique_id):
     logger = get_global_logger(unique_id)
     # 预处理文件,获取处理后的数据
-    processed_data = preprocess_file_main(output_folder, file_path, file_type,logger)
+    processed_data = preprocess_files(output_folder, file_path, file_type,logger)
     if not processed_data:
         error_response = {
             'error': '文件预处理失败。请检查文件类型并重试。'
@@ -248,8 +248,8 @@ def goods_bid_main(output_folder, file_path, file_type, unique_id):
             'evaluation_standards': executor.submit(fetch_evaluation_standards,processed_data['invalid_deleted_docx'], #技术评分 商务评分
                                                     processed_data['evaluation_method_path'],logger),
-            # 'invalid_requirements': executor.submit(fetch_invalid_requirements, processed_data['invalid_added_docx'], #无效标与废标项
-            #                                         output_folder,logger),
+            'invalid_requirements': executor.submit(fetch_invalid_requirements, processed_data['invalid_added_docx'], #无效标与废标项
+                                                    output_folder,logger),
             'bidding_documents_requirements': executor.submit(fetch_bidding_documents_requirements,processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'],
                                                               processed_data['clause_path'],logger), #投标文件要求
@@ -257,12 +257,12 @@ def goods_bid_main(output_folder, file_path, file_type, unique_id):
             'opening_bid': executor.submit(fetch_bid_opening, processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'],
                                            processed_data['clause_path'],logger), #开评定标流程
-            # 'base_info': executor.submit(fetch_project_basic_info, processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'], #基础信息
-            #                              processed_data['procurement_path'],processed_data['clause_path'],logger),
-            # 'qualification_review': executor.submit(fetch_qualification_review, processed_data['invalid_deleted_docx'], #资格审查
-            #                                         processed_data['qualification_path'],
-            #                                         processed_data['notice_path'],logger),
+            'base_info': executor.submit(fetch_project_basic_info, processed_data['invalid_deleted_docx'],processed_data['merged_baseinfo_path'], #基础信息
+                                         processed_data['procurement_path'],processed_data['clause_path'],logger),
+            'qualification_review': executor.submit(fetch_qualification_review, processed_data['invalid_deleted_docx'], #资格审查
+                                                    processed_data['qualification_path'],
+                                                    processed_data['notice_path'],logger),
         }
         collected_good_list = []
         # 提前处理这些不依赖的任务,按完成顺序返回
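
The newly uncommented entries all land in one futures dict that goods_bid_main drains with as_completed, per the comment '提前处理这些不依赖的任务,按完成顺序返回' (handle these independent tasks up front and return them in completion order). A generic sketch of that consumption pattern, with hypothetical naming; the per-task error handling here is an assumption, not necessarily what the project does:

from concurrent.futures import as_completed


def drain_futures(futures):
    # futures maps a result key (e.g. 'base_info') to a submitted Future.
    key_by_future = {f: k for k, f in futures.items()}
    results = {}
    for future in as_completed(key_by_future):
        key = key_by_future[future]
        try:
            results[key] = future.result()
        except Exception as exc:
            # Assumption: record one section's failure without sinking the rest.
            results[key] = {"error": str(exc)}
    return results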