2024-11-23 17:50:32 +08:00
|
|
|
|
# flask_app/routes/upload.py
|
2024-11-25 09:15:56 +08:00
|
|
|
|
from functools import wraps
|
2024-11-23 17:50:32 +08:00
|
|
|
|
|
2024-11-25 09:15:56 +08:00
|
|
|
|
from flask import Blueprint, request, jsonify, Response, stream_with_context, g, current_app
|
2024-11-23 17:50:32 +08:00
|
|
|
|
import json
|
|
|
|
|
import os
|
|
|
|
|
import time
|
|
|
|
|
|
|
|
|
|
from flask_app.main.download import download_file
|
|
|
|
|
from flask_app.main.工程标解析main import engineering_bid_main
|
|
|
|
|
from flask_app.货物标.货物标解析main import goods_bid_main
|
|
|
|
|
from flask_app.general.post_processing import outer_post_processing
|
|
|
|
|
from flask_app.general.接口_技术偏离表 import get_tech_and_business_deviation
|
|
|
|
|
from flask_app.routes.utils import generate_deviation_response, validate_request
|
|
|
|
|
from flask_app.logger_setup import CSTFormatter
|
|
|
|
|
|
|
|
|
|
upload_bp = Blueprint('upload', __name__)
|
2024-11-25 09:15:56 +08:00
|
|
|
|
|
|
|
|
|
def require_connection_limit():
    """Decorator factory: throttle a route through the app-wide connection limiter.

    The wrapped view blocks until a slot is free on
    ``current_app.connection_limiter.semaphore`` and releases the slot when
    the view returns or raises, so at most N requests run concurrently.
    """
    def decorator(view_func):
        @wraps(view_func)
        def wrapped(*args, **kwargs):
            # Resolve the limiter at call time (current_app is request-bound).
            semaphore = current_app.connection_limiter.semaphore
            semaphore.acquire()  # block until a connection slot is available
            try:
                return view_func(*args, **kwargs)
            finally:
                # Always free the slot, even if the view raised.
                semaphore.release()
        return wrapped
    return decorator
|
2024-11-23 17:50:32 +08:00
|
|
|
|
@upload_bp.route('/upload', methods=['POST'])
@require_connection_limit()
def zbparse():
    """SSE endpoint: validate the upload request, then stream parse results.

    Expects a JSON body carrying a file URL and a bid type (validated by
    ``validate_request``). On success returns a ``text/event-stream``
    response produced by :func:`process_and_stream`; on failure returns a
    JSON error payload with HTTP 500.
    """
    # Bind the logger BEFORE the try-block: previously this assignment was the
    # first statement inside `try`, so a failure at or before it left `logger`
    # unbound and both except handlers raised NameError, masking the real error.
    logger = g.logger
    try:
        logger.info("zbparse start!!!")
        received_data = request.get_json()
        logger.info("Received JSON data: " + str(received_data))
        validation = validate_request()
        # validate_request() returns (file_url, zb_type) on success, or a
        # ready-made Flask error response otherwise.
        if isinstance(validation, tuple) and len(validation) == 2 and isinstance(validation[0], str):
            file_url, zb_type = validation
        else:
            return validation  # 错误响应
        try:
            logger.info("starting parsing url:" + file_url)
            # stream_with_context keeps the request context alive while the
            # generator is consumed by the client.
            return Response(stream_with_context(process_and_stream(file_url, zb_type)),
                            content_type='text/event-stream')
        except Exception as e:
            logger.error('Exception occurred: ' + str(e))
            return jsonify({'error': str(e)}), 500
    except Exception as e:
        logger.error('Unexpected exception: ' + str(e))
        return jsonify({'error': 'Internal server error'}), 500
|
2024-11-23 17:50:32 +08:00
|
|
|
|
|
|
|
|
|
def _sse(payload):
    """Serialize *payload* as one Server-Sent-Events ``data:`` frame."""
    # ensure_ascii=False keeps Chinese text readable in the stream; previously
    # the final 'END' frame (ASCII-only) omitted it, inconsistently.
    return f"data: {json.dumps(payload, ensure_ascii=False)}\n\n"


def _save_json(obj, path, logger, description):
    """Write *obj* to *path* as pretty-printed UTF-8 JSON, logging the outcome."""
    try:
        with open(path, 'w', encoding='utf-8') as json_file:
            json.dump(obj, json_file, ensure_ascii=False, indent=4)
        logger.info(f"{description}已保存到 '{path}'")
    except IOError as e:
        logger.error(f"保存JSON文件时出错: {e}")


def process_and_stream(file_url, zb_type):
    """
    下载文件并进行处理,支持工程标和货物标的处理。

    Generator yielding SSE frames: per-chunk 'Processing' events, five
    deviation-table events, 'extracted_info', 'Combined_data', and a final
    'END' sentinel. zb_type 1 -> engineering pipeline, 2 -> goods pipeline
    (anything else falls back to engineering).

    Relies on request-scoped globals: g.logger, g.unique_id, g.output_folder.
    """
    logger = g.logger
    unique_id = g.unique_id
    output_folder = g.output_folder
    filename = "ztbfile"
    downloaded_filename = os.path.join(output_folder, filename)
    start_time = time.time()

    try:
        downloaded = download_file(file_url, downloaded_filename)
        if not downloaded:
            logger.error("下载文件失败或不支持的文件类型")
            yield _sse({
                'message': 'File processing failed',
                'filename': '',
                'data': json.dumps({'error': 'File processing failed'})
            })
            return

        downloaded_filepath, file_type = downloaded

        # file_type == 4 marks a format download_file recognized but rejected.
        if file_type == 4:
            logger.error("不支持的文件类型")
            yield _sse({
                'message': 'Unsupported file type',
                'filename': None,
                'data': json.dumps({'error': 'Unsupported file type'})
            })
            return

        logger.info("本地文件路径: " + downloaded_filepath)

        combined_data = {}
        good_list = None

        # Dispatch table: bid type -> parsing pipeline (default: engineering).
        processing_functions = {
            1: engineering_bid_main,
            2: goods_bid_main
        }
        processing_func = processing_functions.get(zb_type, engineering_bid_main)

        for data in processing_func(output_folder, downloaded_filepath, file_type, unique_id):
            if not data.strip():
                logger.error("Received empty data, skipping JSON parsing.")
                continue

            try:
                parsed_data = json.loads(data)
            except json.JSONDecodeError as e:
                logger.error(f"Failed to decode JSON: {e}")
                logger.error(f"Data received: {data}")
                continue

            # 'good_list' chunks are collected for post-processing, not streamed.
            if 'good_list' in parsed_data:
                good_list = parsed_data['good_list']
                logger.info("Collected good_list from the processing function: %s", good_list)
                continue

            # Merge every inner mapping into the accumulated result
            # (keys were unused — iterate values directly).
            for inner_dict in parsed_data.values():
                if isinstance(inner_dict, dict):
                    combined_data.update(inner_dict)

            yield _sse({
                'message': 'Processing',
                'filename': os.path.basename(downloaded_filepath),
                'data': data
            })

        base_end_time = time.time()
        logger.info(f"分段解析完成,耗时:{base_end_time - start_time:.2f} 秒")

        output_json_path = os.path.join(output_folder, 'final_result.json')
        extracted_info_path = os.path.join(output_folder, 'extracted_result.json')
        includes = ["基础信息", "资格审查", "商务评分", "技术评分", "无效标与废标项", "投标文件要求", "开评定标流程"]
        final_result, extracted_info, tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation = outer_post_processing(combined_data, includes, good_list)

        tech_deviation_response, tech_deviation_star_response, zigefuhe_deviation_response, shangwu_deviation_response, shangwu_star_deviation_response = generate_deviation_response(
            tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation, logger)

        # Stream the five deviation tables in their fixed order.
        for deviation_response in (tech_deviation_response,
                                   tech_deviation_star_response,
                                   zigefuhe_deviation_response,
                                   shangwu_deviation_response,
                                   shangwu_star_deviation_response):
            yield _sse(deviation_response)

        # Persist intermediate and final results for later inspection;
        # failures are logged but do not interrupt the stream.
        _save_json(extracted_info, extracted_info_path, logger, "摘取后的数据")
        _save_json(final_result, output_json_path, logger, "合并后的数据")

        yield _sse({
            'message': 'extracted_info',
            'filename': os.path.basename(downloaded_filepath),
            'data': json.dumps(extracted_info, ensure_ascii=False)
        })

        yield _sse({
            'message': 'Combined_data',
            'filename': os.path.basename(downloaded_filepath),
            'data': json.dumps(final_result, ensure_ascii=False)
        })

        # Terminal sentinel telling the client the stream is complete.
        yield _sse({
            'message': 'File uploaded and processed successfully',
            'filename': os.path.basename(downloaded_filepath),
            'data': 'END'
        })

    finally:
        end_time = time.time()
        duration = end_time - start_time
        logger.info(f"Total processing time: {duration:.2f} seconds")
|