# flask_app/routes/upload.py
from flask import Blueprint, request, jsonify, g
import json
import os
import time

from flask_app.main.download import download_file
from flask_app.routes.工程标解析main import engineering_bid_main
from flask_app.routes.货物标解析main import goods_bid_main
from flask_app.general.post_processing import outer_post_processing
from flask_app.routes.utils import generate_deviation_response, validate_and_setup_logger
from flask_app.ConnectionLimiter import require_connection_limit

upload_bp = Blueprint('upload', __name__)


@upload_bp.route('/upload', methods=['POST'])
@validate_and_setup_logger
@require_connection_limit(timeout=720)
def zbparse():
    # POST /upload: validate the request (handled by the decorators), then stream
    # parsing results back to the caller as SSE-style "data:" events.
    logger = g.logger
    try:
        logger.info("大解析开始!!!")
        received_data = request.get_json()
        logger.info("Received JSON data: " + str(received_data))
        file_url = g.file_url
        zb_type = g.zb_type
        try:
            logger.info("starting parsing url: " + file_url)
            return process_and_stream(file_url, zb_type)
        except Exception as e:
            logger.error('Exception occurred: ' + str(e))
            return jsonify({'error': str(e)}), 500
    except Exception as e:
        logger.error('Unexpected exception: ' + str(e))
        return jsonify({'error': 'Internal server error'}), 500


def process_and_stream(file_url, zb_type):
    """
    Download the file and process it; both engineering bids (工程标) and
    goods bids (货物标) are supported.
    """
    logger = g.logger
    unique_id = g.unique_id
    output_folder = g.output_folder
    filename = "ztbfile"
    downloaded_filename = os.path.join(output_folder, filename)
    start_time = time.time()

    try:
        # Step 1: download the tender document to the per-request output folder.
        downloaded = download_file(file_url, downloaded_filename)
        if not downloaded:
            logger.error("下载文件失败或不支持的文件类型")
            error_response = {
                'message': 'File processing failed',
                'filename': '',
                'data': json.dumps({'error': 'File processing failed'})
            }
            yield f"data: {json.dumps(error_response)}\n\n"
            return

        downloaded_filepath, file_type = downloaded

        # file_type 4 is the downloader's marker for an unsupported format.
        if file_type == 4:
            logger.error("不支持的文件类型")
            error_response = {
                'message': 'Unsupported file type',
                'filename': None,
                'data': json.dumps({'error': 'Unsupported file type'})
            }
            yield f"data: {json.dumps(error_response)}\n\n"
            return

        logger.info("本地文件路径: " + downloaded_filepath)

        combined_data = {}
        good_list = None

        # Step 2: pick the parsing pipeline.
        # zb_type 1 -> engineering bid, 2 -> goods bid; anything else falls back
        # to the engineering pipeline.
        processing_functions = {
            1: engineering_bid_main,
            2: goods_bid_main
        }
        processing_func = processing_functions.get(zb_type, engineering_bid_main)

        # Step 3: stream intermediate results as the pipeline yields them.
        for data in processing_func(output_folder, downloaded_filepath, file_type, unique_id):
            if not data.strip():
                logger.error("Received empty data, skipping JSON parsing.")
                continue
            try:
                parsed_data = json.loads(data)
            except json.JSONDecodeError as e:
                logger.error(f"Failed to decode JSON: {e}")
                logger.error(f"Data received: {data}")
                continue

            # 'good_list' is a side-channel payload; collect it for post-processing
            # instead of forwarding it to the client.
            if 'good_list' in parsed_data:
                good_list = parsed_data['good_list']
                logger.info("Collected good_list from the processing function: %s", good_list)
                continue

            # Merge every top-level section emitted by the pipeline into combined_data.
            for outer_key, inner_dict in parsed_data.items():
                if isinstance(inner_dict, dict):
                    combined_data.update(inner_dict)

            response = {
                'message': 'Processing',
                'filename': os.path.basename(downloaded_filepath),
                'data': data
            }
            yield f"data: {json.dumps(response, ensure_ascii=False)}\n\n"

        base_end_time = time.time()
        logger.info(f"分段解析完成,耗时:{base_end_time - start_time:.2f} 秒")

        # Step 4: post-process the merged sections and build the deviation tables.
        output_json_path = os.path.join(output_folder, 'final_result.json')
        extracted_info_path = os.path.join(output_folder, 'extracted_result.json')
        includes = ["基础信息", "资格审查", "商务评分", "技术评分", "无效标与废标项", "投标文件要求", "开评定标流程"]
        final_result, extracted_info, tech_deviation, tech_star_deviation, business_deviation, \
            business_star_deviation, zigefuhe_deviation = outer_post_processing(combined_data, includes, good_list)

        tech_deviation_response, tech_deviation_star_response, zigefuhe_deviation_response, \
            shangwu_deviation_response, shangwu_star_deviation_response = generate_deviation_response(
                tech_deviation, tech_star_deviation, business_deviation, business_star_deviation,
                zigefuhe_deviation, logger)

        yield f"data: {json.dumps(tech_deviation_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(tech_deviation_star_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(zigefuhe_deviation_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(shangwu_deviation_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(shangwu_star_deviation_response, ensure_ascii=False)}\n\n"

        # Step 5: persist the extracted and merged results to disk.
        try:
            with open(extracted_info_path, 'w', encoding='utf-8') as json_file:
                json.dump(extracted_info, json_file, ensure_ascii=False, indent=4)
            logger.info(f"摘取后的数据已保存到 '{extracted_info_path}'")
        except IOError as e:
            logger.error(f"保存JSON文件时出错: {e}")

        try:
            with open(output_json_path, 'w', encoding='utf-8') as json_file:
                json.dump(final_result, json_file, ensure_ascii=False, indent=4)
            logger.info(f"合并后的数据已保存到 '{output_json_path}'")
        except IOError as e:
            logger.error(f"保存JSON文件时出错: {e}")

        # Step 6: send the final payloads, then an explicit END marker.
        extracted_info_response = {
            'message': 'extracted_info',
            'filename': os.path.basename(downloaded_filepath),
            'data': json.dumps(extracted_info, ensure_ascii=False)
        }
        yield f"data: {json.dumps(extracted_info_response, ensure_ascii=False)}\n\n"

        complete_response = {
            'message': 'Combined_data',
            'filename': os.path.basename(downloaded_filepath),
            'data': json.dumps(final_result, ensure_ascii=False)
        }
        yield f"data: {json.dumps(complete_response, ensure_ascii=False)}\n\n"

        final_response = {
            'message': 'File uploaded and processed successfully',
            'filename': os.path.basename(downloaded_filepath),
            'data': 'END'
        }
        yield f"data: {json.dumps(final_response)}\n\n"

    finally:
        # Always log the total wall-clock time, even on early return or error.
        end_time = time.time()
        duration = end_time - start_time
        logger.info(f"Total processing time: {duration:.2f} seconds")
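

# ---------------------------------------------------------------------------
# Usage sketch (not part of the route's runtime path): a minimal client that
# consumes the "data: {...}" event stream emitted by /upload. The payload
# field names "file_url" and "zb_type" are assumptions inferred from the
# g.file_url / g.zb_type attributes populated by validate_and_setup_logger,
# and the host/port and document URL are hypothetical; adjust as needed.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import requests  # assumed available; used only for this local sketch

    demo_payload = {
        "file_url": "https://example.com/tender.pdf",  # hypothetical document URL
        "zb_type": 2,                                  # 1 = engineering bid, 2 = goods bid
    }
    with requests.post("http://127.0.0.1:5000/upload", json=demo_payload, stream=True) as resp:
        for line in resp.iter_lines(decode_unicode=True):
            # Each event is a single "data: {...}" line; blank lines separate events.
            if line and line.startswith("data: "):
                event = json.loads(line[len("data: "):])
                print(event.get("message"), event.get("filename"))
                if event.get("data") == "END":
                    break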