11.23 Ensured that the hierarchy under the system does not exceed 2 levels
parent 1c542377a9
commit 2151bdd485
49 flask_app/logger_setup.py Normal file
@@ -0,0 +1,49 @@
# flask_app/logger_setup.py
import logging
import uuid
from datetime import datetime, timedelta
from flask import g
import os


class CSTFormatter(logging.Formatter):
    """自定义的 Formatter,将日志的时间戳调整为中国标准时间(UTC+8)"""

    def formatTime(self, record, datefmt=None):
        ct = datetime.fromtimestamp(record.created) + timedelta(hours=8)
        if datefmt:
            s = ct.strftime(datefmt)
        else:
            try:
                s = ct.strftime("%Y-%m-%d %H:%M:%S")
                if self.usesTime():
                    s = f"{s},{record.msecs:03d}"
            except ValueError:
                s = ct.strftime("%Y-%m-%d %H:%M:%S")
        return s


def create_logger(app, subfolder):
    """
    创建一个唯一的 logger 和对应的输出文件夹。

    参数:
        subfolder (str): 子文件夹名称,如 'output1', 'output2', 'output3'
    """
    unique_id = str(uuid.uuid4())
    g.unique_id = unique_id
    output_folder = os.path.join("flask_app", "static", "output", subfolder, unique_id)
    os.makedirs(output_folder, exist_ok=True)
    log_filename = "log.txt"
    log_path = os.path.join(output_folder, log_filename)
    logger = logging.getLogger(unique_id)
    if not logger.handlers:
        file_handler = logging.FileHandler(log_path)
        file_formatter = CSTFormatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(logging.Formatter('%(message)s'))
        logger.addHandler(stream_handler)
        logger.setLevel(logging.INFO)
    g.logger = logger
    g.output_folder = output_folder
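create_logger binds a per-request logger and output folder onto flask.g, so it has to run inside a request context; in this commit it is wired up through the before_request hook in start_up.py further below. A minimal sketch of the intended call pattern, assuming a standalone app, an assumed subfolder name "demo_output", and a writable working directory at the repo root:

# Hypothetical illustration, not part of this commit: how a request ends up
# with its own logger and output folder once create_logger has run.
from flask import Flask, g
from flask_app.logger_setup import create_logger

app = Flask(__name__)

@app.before_request
def setup_request_logging():
    # "demo_output" is an assumed subfolder name for this sketch
    create_logger(app, "demo_output")

@app.route("/ping")
def ping():
    g.logger.info("handling /ping")  # written to <output_folder>/log.txt with CST timestamps
    return {"output_folder": g.output_folder}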
@@ -580,8 +580,6 @@ def truncate_pdf_specific_engineering(pdf_path, output_folder, selections, uniqu
        logger.error(f"Error in truncate_pdf_specific_engineering: {e}")
        return [""] * len(selections)  # 返回与 selections 数量相同的空字符串列表


# TODO:目前merged_baseinfo没有包含投标人须知正文。

if __name__ == "__main__":
    start_time = time.time()
    # input_path = "C:\\Users\\Administrator\\Desktop\\new招标文件\\工程标"
85 flask_app/routes/get_deviation.py Normal file
@@ -0,0 +1,85 @@
# flask_app/routes/get_deviation.py

from flask import Blueprint, request, jsonify, Response, stream_with_context, g
import json
import os
from flask_app.main.download import download_file
from flask_app.general.post_processing import outer_post_processing
from flask_app.general.接口_技术偏离表 import get_tech_and_business_deviation

from flask_app.logger_setup import CSTFormatter

from flask_app.routes.utils import generate_deviation_response, validate_request

get_deviation_bp = Blueprint('get_deviation', __name__)

@get_deviation_bp.route('/get_deviation', methods=['POST'])
def get_deviation():
    logger = g.logger
    unique_id = g.unique_id
    validation = validate_request()

    if isinstance(validation, tuple) and len(validation) == 2 and isinstance(validation[0], str):
        file_url, zb_type = validation
    else:
        return validation  # 错误响应

    try:
        logger.info("开始解析 URL: " + file_url)
        if zb_type != 2:
            logger.error(f"无效的 zb_type: {zb_type}. 期望 zb_type: 2")
            return jsonify({
                'error': 'Invalid zb_type',
                'message': '此端点仅支持 zb_type 2 (采购需求)'
            }), 400
        else:
            tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation = download_and_process_file_for_deviation(
                file_url, unique_id)
            if tech_deviation is None:
                return jsonify({'error': 'File processing failed'}), 500
            tech_deviation_response, tech_deviation_star_response, zigefuhe_deviation_response, shangwu_deviation_response, shangwu_star_deviation_response = generate_deviation_response(
                tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation,
                logger)

            final_response = {
                'message': 'processed successfully',
                'filename': 'END',
                'data': 'END'
            }

            @stream_with_context
            def generate():
                yield f"data: {json.dumps(tech_deviation_response, ensure_ascii=False)}\n\n"
                yield f"data: {json.dumps(tech_deviation_star_response, ensure_ascii=False)}\n\n"
                yield f"data: {json.dumps(zigefuhe_deviation_response, ensure_ascii=False)}\n\n"
                yield f"data: {json.dumps(shangwu_deviation_response, ensure_ascii=False)}\n\n"
                yield f"data: {json.dumps(shangwu_star_deviation_response, ensure_ascii=False)}\n\n"
                yield f"data: {json.dumps(final_response, ensure_ascii=False)}\n\n"

            return Response(generate(), mimetype='text/event-stream')
    except Exception as e:
        logger.error('发生异常: ' + str(e))
        return jsonify({'error': str(e)}), 500


def download_and_process_file_for_deviation(file_url, unique_id):
    """
    下载并处理采购需求文件。

    参数:
        file_url (str): 文件的URL地址。

    返回:
        tuple: (tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation)
    """
    logger = g.logger
    output_folder = g.output_folder
    filename = "ztbfile"
    downloaded_filename = os.path.join(output_folder, filename)
    downloaded_filepath, file_type = download_file(file_url, downloaded_filename)
    if downloaded_filepath is None or file_type == 4:
        logger.error("Unsupported file type or failed to download file")
        return None, None, None, None, None
    logger.info("Local file path: " + downloaded_filepath)
    tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation = get_tech_and_business_deviation(
        downloaded_filepath, file_type, unique_id, output_folder)
    return tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation
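The endpoint only accepts zb_type 2 and streams five deviation tables plus a final END marker over server-sent events. A hedged client sketch (host, port, and file URL are assumptions, not part of this commit) that consumes the stream and stops at the END marker:

# Hypothetical client sketch: consume the /get_deviation SSE stream.
import json
import requests

resp = requests.post(
    "http://localhost:5000/get_deviation",  # assumed host and port
    json={"file_url": "https://example.com/demo.pdf", "zb_type": 2},
    stream=True,
)
for raw in resp.iter_lines(decode_unicode=True):
    if not raw or not raw.startswith("data:"):
        continue
    message = json.loads(raw[len("data:"):].strip())
    if message.get("data") == "END":
        break
    print(message["message"], "->", message["data"][:80])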
76 flask_app/routes/little_zbparse.py Normal file
@@ -0,0 +1,76 @@
# flask_app/routes/little_zbparse.py

from flask import Blueprint, request, jsonify, Response, stream_with_context, g
import json
import os

from flask_app.main.download import download_file
from flask_app.general.post_processing import outer_post_processing
from flask_app.general.接口_小解析 import little_parse_main

from flask_app.logger_setup import CSTFormatter

from flask_app.routes.utils import validate_request

little_zbparse_bp = Blueprint('little_zbparse', __name__)

@little_zbparse_bp.route('/little_zbparse', methods=['POST'])
def little_zbparse():
    logger = g.logger
    file_url, zb_type = validate_request()

    if isinstance(file_url, tuple):  # 检查是否为错误响应
        return file_url

    try:
        logger.info("starting parsing url:" + file_url)
        final_json_path = download_and_process_file(file_url, zb_type)
        if not final_json_path:
            return jsonify({'error': 'File processing failed'}), 500
        response = generate_response(final_json_path)
        return response
    except Exception as e:
        logger.error('Exception occurred: ' + str(e))
        return jsonify({'error': str(e)}), 500


def download_and_process_file(file_url, zb_type):
    """
    下载并处理文件,根据zb_type选择处理函数。

    参数:
        file_url (str): 文件的URL地址。
        zb_type (int): 标的类型,1表示工程标,2表示货物标。

    返回:
        str: 处理后的文件路径。
    """
    logger = g.logger
    output_folder = g.output_folder
    filename = "ztbfile"
    downloaded_filename = os.path.join(output_folder, filename)

    downloaded_filepath, file_type = download_file(file_url, downloaded_filename)

    if downloaded_filepath is None or file_type == 4:
        logger.error("Unsupported file type or failed to download file")
        return None

    logger.info("Local file path: " + downloaded_filepath)
    processed_file_path = little_parse_main(output_folder, downloaded_filepath, file_type, zb_type, g.unique_id)
    return processed_file_path


def generate_response(final_json_path):
    logger = g.logger
    if not final_json_path:
        logger.error('Empty or None path provided for final_json.')
        return jsonify({'error': 'No path provided for final_json.'}), 400
    if not os.path.exists(final_json_path):
        logger.error('final_json not found at path: ' + final_json_path)
        return jsonify({'error': 'final_json not found'}), 404
    with open(final_json_path, 'r', encoding='utf-8') as f:
        zbparse_data = json.load(f)
    json_str = json.dumps(zbparse_data, ensure_ascii=False)
    return jsonify({
        'message': 'Little Parse processed successfully',
        'filename': os.path.basename(final_json_path),
        'data': json_str
    })
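Unlike /upload and /get_deviation, this endpoint returns a single JSON response rather than a stream, with the parsed result serialized as a string in the 'data' field. A hedged client sketch (host, port, and file URL are assumptions):

# Hypothetical client sketch: call /little_zbparse and unpack the nested JSON string.
import json
import requests

resp = requests.post(
    "http://localhost:5000/little_zbparse",  # assumed host and port
    json={"file_url": "https://example.com/demo.docx", "zb_type": 2},
)
payload = resp.json()
if resp.ok:
    parsed = json.loads(payload["data"])  # 'data' carries the final_json content as a string
    print(payload["filename"], list(parsed.keys()))
else:
    print("error:", payload.get("error"))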
201 flask_app/routes/test_zbparse.py Normal file
@@ -0,0 +1,201 @@
# flask_app/routes/test_zbparse.py

from flask import Blueprint, Response, stream_with_context, jsonify, current_app  # jsonify/current_app are used in the error branch
import json
import time

test_zbparse_bp = Blueprint('test_zbparse', __name__)

@test_zbparse_bp.route('/api/test_zbparse', methods=['POST'])
def test_zbparse():
    try:
        return Response(stream_with_context(test_process_and_stream()), content_type='text/event-stream')
    except Exception as e:
        # 使用 Flask 应用的日志记录器
        current_app.logger.error('Exception occurred: ' + str(e))
        return jsonify({'error': str(e)}), 500


def test_process_and_stream():
    data_segments = [
        {
            "base_info": {
                "基础信息": {
                    "project_name": "测试项目1",
                    "project_code": "TP001",
                    "project_manager": "张三",
                    "start_date": "2024-01-10",
                    "end_date": "2024-12-31"
                }
            }
        },
        {
            "qualification_review": {
                "资格审查": {
                    "review_criteria": ["公司资质", "过往业绩", "财务报表"],
                    "required_documents": ["营业执照", "资质证书", "近三年财务报告"],
                    "minimum_requirements": {
                        "company_age": "至少5年",
                        "past_projects": "至少3个大型项目"
                    }
                }
            }
        },
        {
            "technical_standards": {
                "技术标": {
                    "technical_requirements": ["设备质量要求", "施工工艺", "安全标准"],
                    "materials_list": ["钢筋", "水泥", "电缆"],
                    "equipment_specs": {
                        "excavator": "型号X123",
                        "concrete_mixer": "型号Y456"
                    }
                }
            }
        },
        {
            "commercial_standards": {
                "商务标": {
                    "pricing_method": "固定总价",
                    "payment_schedule": ["30%合同签订", "40%中期支付", "30%项目完成支付"],
                    "contract_conditions": {
                        "warranty_period": "2年",
                        "penalty_clauses": "延期每周罚款5%"
                    }
                }
            }
        },
        {
            "invalid_requirements": {
                "无效标与废标项": {
                    "common_issues": ["未按要求提交保证金", "技术标不达标"],
                    "invalidation_reasons": {
                        "missing_documents": "缺少必要文件",
                        "unqualified_technical_specs": "技术规格不合要求"
                    }
                }
            }
        },
        {
            "bidding_documents_requirements": {
                "投标文件要求": {
                    "file_format": "PDF",
                    "submission_deadline": "2024-08-01 17:00",
                    "submission_location": "北京市某某大厦5楼",
                    "required_sections": ["公司简介", "技术方案", "商务报价"]
                }
            }
        },
        {
            "opening_bid": {
                "开评定标流程": {
                    "bid_opening_time": "2024-09-01 10:00",
                    "location": "会议室A",
                    "evaluation_criteria": ["价格", "技术能力", "项目经验"],
                    "evaluation_process": {
                        "first_round": "资格审查",
                        "second_round": "技术评分",
                        "final_round": "商务报价评定"
                    }
                }
            }
        }
    ]

    filename = "test_file.pdf"

    for i, data in enumerate(data_segments, 1):
        response = {
            'message': f'Processing segment {i}',
            'filename': filename,
            'data': data
        }
        yield f"data: {json.dumps(response, ensure_ascii=False)}\n\n"
        time.sleep(3)  # 每隔3秒发送一段数据

    combined_data = {}
    for segment in data_segments:
        for outer_key, inner_dict in segment.items():
            inner_key, inner_value = next(iter(inner_dict.items()))
            combined_data[inner_key] = inner_value

    procurement_reqs = {
        "采购项目": "混凝土",
        "数量": "500立方米",
        "规格": "C30"
    }
    tech_deviation_response = {
        'message': 'procurement_reqs',
        'filename': filename,
        'data': json.dumps(procurement_reqs, ensure_ascii=False)
    }

    zige_deviation_table = {
        "资格性检查": ["具有独立承担民事责任的能力;", "具有良好的商业信誉和健全的财务会计制度;",
                  "具有履行合同所必需的设备和专业技术能力;", "有依法缴纳税收和社会保障资金的良好记录;",
                  "参加政府采购活动前三年内,在经营活动中没有重大违法记录;"]
    }
    fuhe_deviation_table = {
        "符合性检查": ["符合竞争性磋商文件第一部分竞争性磋商公告“六、其它补充事宜”第 1、2 条要求。",
                  "响应性文件签字和盖章齐全。", "满足磋商有效期。",
                  "供应商所提交的报价表未出现两个或两个以上不同报价。"]
    }
    shangwu_deviation_table = {
        "商务要求偏离表": ["交货期(工期):合同签订之日起 15 个日历天内完成,并通过项目验收。", "采购人指定地点",
                   "本项目报价须为固定总价,包含但不限于:采购、实施、调试、试运行、验收、运维等所有完成本项目相关的一切费用。",
                   "项目质保期为一年,服务内容包括项目所有硬件和软件,质保期自项目验收合格后起算。",
                   "中标人在合同约定时间内完成所供货物的安装集成并投入正常使用,经采购人组织验收(终验)合格后,按合同约定及相关规定支付货款。"]
    }
    shangwu_star_deviation_table = {
        "商务带星要求偏离表": ["交货期(工期):合同签订之日起 15 个日历天内完成,并通过项目验收。", "采购人指定地点",
                    "本项目报价须为固定总价,包含但不限于:采购、实施、调试、试运行、验收、运维等所有完成本项目相关的一切费用。",
                    "项目质保期为一年,服务内容包括项目所有硬件和软件,质保期自项目验收合格后起算。",
                    "中标人在合同约定时间内完成所供货物的安装集成并投入正常使用,经采购人组织验收(终验)合格后,按合同约定及相关规定支付货款。"]
    }
    jishu_star_deviation_table = {
        "高清数字枪机": ["6、▲补光距离:红外光最远可达 30 m,白光最远可达 30 m", "▲音频: 1个内置麦克风"],
        "高清数字半球机": ["▲音频:内置 1个麦克风", "▲供电方式: DC:12 V ± 25%,支持防反接保护; PoE: 802.3af,Class 3"],
        "网络硬盘录像机": ["▲可接入 1T、2T、3T、4T、6T、8T、10T、12TB、14TB、16TB、 18TB、20TB容量的 SATA接口硬盘;",
                   "▲样机可在预览界面随意选择一个或多个通道,在预警面板实 时展示此通道的目标抓拍信息,包括:事件名称、事件触发时间、 人脸抓图;针对人脸比对同时显示姓名、相似度;针对车辆报警 同时显示车牌;针对人体和车辆目标,可分别显示“人体”、“车 辆”。",
                   "▲具有存储安全保障功能,当存储压力过高或硬盘出现性能 不足时,可优先录像业务存储;"]
    }

    zigefuhe_deviation_response = {
        'message': 'zigefuhe_deviation',
        'filename': filename,
        'data': json.dumps(zige_deviation_table, ensure_ascii=False)
    }
    shangwu_deviation_response = {
        'message': 'shangwu_deviation',
        'filename': filename,
        'data': json.dumps(shangwu_deviation_table, ensure_ascii=False)
    }
    shangwu_star_deviation_response = {
        'message': 'shangwu_star_deviation',
        'filename': filename,
        'data': json.dumps(shangwu_star_deviation_table, ensure_ascii=False)
    }
    jishu_star_deviation_response = {
        'message': 'jishu_star_deviation',
        'filename': filename,
        'data': json.dumps(jishu_star_deviation_table, ensure_ascii=False)
    }

    yield f"data:{json.dumps(zigefuhe_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data:{json.dumps(shangwu_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data:{json.dumps(shangwu_star_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data:{json.dumps(jishu_star_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data: {json.dumps(tech_deviation_response, ensure_ascii=False)}\n\n"

    complete_response = {
        'message': 'Combined data',
        'filename': filename,
        'data': combined_data
    }
    yield f"data: {json.dumps(complete_response, ensure_ascii=False)}\n\n"

    final_response = {
        'message': 'File processed successfully',
        'filename': filename,
        'data': 'END'
    }
    yield f"data: {json.dumps(final_response, ensure_ascii=False)}\n\n"
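The mock endpoint can be exercised end to end with Flask's test client once the app factory from start_up.py (below) is in place. A hedged test sketch, assuming the code is run from the repo root so create_logger can create its output directories; note the built-in sleeps make the full stream take roughly 20 seconds:

# Hypothetical test sketch: drive /api/test_zbparse and check the END marker.
from flask_app.start_up import create_app

def test_mock_stream_ends_with_end_marker():
    app = create_app()
    client = app.test_client()
    resp = client.post("/api/test_zbparse")        # blocks until the generator finishes (~20 s)
    body = resp.get_data(as_text=True)
    events = [line for line in body.splitlines() if line.startswith("data:")]
    assert events, "expected at least one SSE event"
    assert '"data": "END"' in events[-1]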
165 flask_app/routes/upload.py Normal file
@@ -0,0 +1,165 @@
# flask_app/routes/upload.py

from flask import Blueprint, request, jsonify, Response, stream_with_context, g
import json
import os
import time

from flask_app.main.download import download_file
from flask_app.main.工程标解析main import engineering_bid_main
from flask_app.货物标.货物标解析main import goods_bid_main
from flask_app.general.post_processing import outer_post_processing
from flask_app.general.接口_技术偏离表 import get_tech_and_business_deviation
from flask_app.routes.utils import generate_deviation_response, validate_request
from flask_app.logger_setup import CSTFormatter

upload_bp = Blueprint('upload', __name__)

@upload_bp.route('/upload', methods=['POST'])
def zbparse():
    logger = g.logger
    logger.info("zbparse start!!!")
    received_data = request.get_json()
    logger.info("Received JSON data: " + str(received_data))
    validation = validate_request()

    if isinstance(validation, tuple) and len(validation) == 2 and isinstance(validation[0], str):
        file_url, zb_type = validation
    else:
        return validation  # 错误响应

    try:
        logger.info("starting parsing url:" + file_url)
        return Response(stream_with_context(process_and_stream(file_url, zb_type)), content_type='text/event-stream')
    except Exception as e:
        logger.error('Exception occurred: ' + str(e))
        return jsonify({'error': str(e)}), 500


def process_and_stream(file_url, zb_type):
    """
    下载文件并进行处理,支持工程标和货物标的处理。
    """
    logger = g.logger
    unique_id = g.unique_id
    output_folder = g.output_folder
    filename = "ztbfile"
    downloaded_filename = os.path.join(output_folder, filename)
    start_time = time.time()

    try:
        downloaded = download_file(file_url, downloaded_filename)
        if not downloaded:
            logger.error("下载文件失败或不支持的文件类型")
            error_response = {
                'message': 'File processing failed',
                'filename': '',
                'data': json.dumps({'error': 'File processing failed'})
            }
            yield f"data: {json.dumps(error_response)}\n\n"
            return

        downloaded_filepath, file_type = downloaded

        if file_type == 4:
            logger.error("不支持的文件类型")
            error_response = {
                'message': 'Unsupported file type',
                'filename': None,
                'data': json.dumps({'error': 'Unsupported file type'})
            }
            yield f"data: {json.dumps(error_response)}\n\n"
            return

        logger.info("本地文件路径: " + downloaded_filepath)

        combined_data = {}
        good_list = None

        processing_functions = {
            1: engineering_bid_main,
            2: goods_bid_main
        }
        processing_func = processing_functions.get(zb_type, engineering_bid_main)

        for data in processing_func(output_folder, downloaded_filepath, file_type, unique_id):
            if not data.strip():
                logger.error("Received empty data, skipping JSON parsing.")
                continue

            try:
                parsed_data = json.loads(data)
            except json.JSONDecodeError as e:
                logger.error(f"Failed to decode JSON: {e}")
                logger.error(f"Data received: {data}")
                continue

            if 'good_list' in parsed_data:
                good_list = parsed_data['good_list']
                logger.info("Collected good_list from the processing function: %s", good_list)
                continue

            for outer_key, inner_dict in parsed_data.items():
                if isinstance(inner_dict, dict):
                    combined_data.update(inner_dict)

            response = {
                'message': 'Processing',
                'filename': os.path.basename(downloaded_filepath),
                'data': data
            }
            yield f"data: {json.dumps(response, ensure_ascii=False)}\n\n"

        base_end_time = time.time()
        logger.info(f"分段解析完成,耗时:{base_end_time - start_time:.2f} 秒")

        output_json_path = os.path.join(output_folder, 'final_result.json')
        extracted_info_path = os.path.join(output_folder, 'extracted_result.json')
        includes = ["基础信息", "资格审查", "商务评分", "技术评分", "无效标与废标项", "投标文件要求", "开评定标流程"]
        final_result, extracted_info, tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation = outer_post_processing(combined_data, includes, good_list)

        tech_deviation_response, tech_deviation_star_response, zigefuhe_deviation_response, shangwu_deviation_response, shangwu_star_deviation_response = generate_deviation_response(
            tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation, logger)
        yield f"data: {json.dumps(tech_deviation_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(tech_deviation_star_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(zigefuhe_deviation_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(shangwu_deviation_response, ensure_ascii=False)}\n\n"
        yield f"data: {json.dumps(shangwu_star_deviation_response, ensure_ascii=False)}\n\n"

        try:
            with open(extracted_info_path, 'w', encoding='utf-8') as json_file:
                json.dump(extracted_info, json_file, ensure_ascii=False, indent=4)
            logger.info(f"摘取后的数据已保存到 '{extracted_info_path}'")
        except IOError as e:
            logger.error(f"保存JSON文件时出错: {e}")

        try:
            with open(output_json_path, 'w', encoding='utf-8') as json_file:
                json.dump(final_result, json_file, ensure_ascii=False, indent=4)
            logger.info(f"合并后的数据已保存到 '{output_json_path}'")
        except IOError as e:
            logger.error(f"保存JSON文件时出错: {e}")

        extracted_info_response = {
            'message': 'extracted_info',
            'filename': os.path.basename(downloaded_filepath),
            'data': json.dumps(extracted_info, ensure_ascii=False)
        }
        yield f"data: {json.dumps(extracted_info_response, ensure_ascii=False)}\n\n"

        complete_response = {
            'message': 'Combined_data',
            'filename': os.path.basename(downloaded_filepath),
            'data': json.dumps(final_result, ensure_ascii=False)
        }
        yield f"data: {json.dumps(complete_response, ensure_ascii=False)}\n\n"

        final_response = {
            'message': 'File uploaded and processed successfully',
            'filename': os.path.basename(downloaded_filepath),
            'data': 'END'
        }
        yield f"data: {json.dumps(final_response)}\n\n"

    finally:
        end_time = time.time()
        duration = end_time - start_time
        logger.info(f"Total processing time: {duration:.2f} seconds")
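process_and_stream flattens each streamed segment by exactly one level before handing the merged dict to outer_post_processing, which is what keeps the merged result at no more than two levels of nesting, as the commit title says. A small self-contained illustration of that merge, using made-up segments rather than real parser output:

# Hypothetical illustration of the one-level flatten in process_and_stream:
# outer keys are dropped, inner dicts are merged into combined_data.
import json

combined_data = {}
for data in (
    json.dumps({"base_info": {"基础信息": {"project_name": "示例项目"}}}, ensure_ascii=False),
    json.dumps({"qualification_review": {"资格审查": {"review_criteria": ["资质"]}}}, ensure_ascii=False),
):
    parsed_data = json.loads(data)
    for outer_key, inner_dict in parsed_data.items():
        if isinstance(inner_dict, dict):
            combined_data.update(inner_dict)

print(combined_data)  # {'基础信息': {...}, '资格审查': {...}}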
51 flask_app/routes/utils.py Normal file
@@ -0,0 +1,51 @@
import json
from flask import request, jsonify

def validate_request():
    """
    验证请求中的JSON数据。
    """
    if not request.is_json:
        return jsonify({'error': 'Missing JSON in request'}), 400
    file_url = request.json.get('file_url')
    zb_type = request.json.get('zb_type', 1)
    if not file_url:
        return jsonify({'error': 'No file URL provided'}), 400
    try:
        zb_type = int(zb_type)
    except (ValueError, TypeError):
        return jsonify({'error': 'Invalid zb_type provided'}), 400
    return file_url, zb_type

def generate_deviation_response(tech_deviation, tech_star_deviation, business_deviation, business_star_deviation,
                                zigefuhe_deviation, logger):
    logger.info(f"技术偏离表: {json.dumps(tech_deviation, ensure_ascii=False, indent=4)}")
    logger.info(f"技术偏离表带星: {json.dumps(tech_star_deviation, ensure_ascii=False, indent=4)}")
    logger.info(f"商务偏离表: {json.dumps(business_deviation, ensure_ascii=False, indent=4)}")
    logger.info(f"商务偏离表带星: {json.dumps(business_star_deviation, ensure_ascii=False, indent=4)}")
    logger.info(f"资格检查偏离表: {json.dumps(zigefuhe_deviation, ensure_ascii=False, indent=4)}")

    tech_deviation_response = {
        'message': 'procurement_reqs',
        'filename': 'procurement_reqs',
        'data': json.dumps(tech_deviation, ensure_ascii=False)
    }
    tech_deviation_star_response = {
        'message': 'jishu_star_deviation',
        'filename': 'jishu_star_deviation',
        'data': json.dumps(tech_star_deviation, ensure_ascii=False)
    }
    zigefuhe_deviation_response = {
        'message': 'zigefuhe_deviation',
        'filename': 'zigefuhe_deviation',
        'data': json.dumps(zigefuhe_deviation, ensure_ascii=False)
    }
    shangwu_deviation_response = {
        'message': 'shangwu_deviation',
        'filename': 'shangwu_deviation',
        'data': json.dumps(business_deviation, ensure_ascii=False)
    }
    shangwu_star_deviation_response = {
        'message': 'shangwu_star_deviation',
        'filename': 'shangwu_star_deviation',
        'data': json.dumps(business_star_deviation, ensure_ascii=False)
    }
    return tech_deviation_response, tech_deviation_star_response, zigefuhe_deviation_response, shangwu_deviation_response, shangwu_star_deviation_response
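validate_request returns either (file_url, zb_type) on success or a (response, status) error pair, and the callers in get_deviation.py and upload.py distinguish the two by checking whether the first element is a string. A hedged sketch of a new route reusing the same helper; the blueprint name and route are assumptions and would still need to be registered in create_app:

# Hypothetical sketch: reuse validate_request in a new route and tell a
# success tuple apart from an error tuple.
from flask import Blueprint, g
from flask_app.routes.utils import validate_request

demo_bp = Blueprint('demo', __name__)  # assumed blueprint name

@demo_bp.route('/demo', methods=['POST'])
def demo():
    validation = validate_request()
    if isinstance(validation, tuple) and len(validation) == 2 and isinstance(validation[0], str):
        file_url, zb_type = validation
    else:
        return validation  # the (jsonify(...), status) error pair from validate_request
    g.logger.info(f"validated request: zb_type={zb_type}, url={file_url}")
    return {'file_url': file_url, 'zb_type': zb_type}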
flask_app/start_up.py
@@ -1,745 +1,53 @@
# flask_app/start_up.py

import logging
from flask import Flask, request
from flask_app.logger_setup import CSTFormatter, create_logger
from flask_app.routes.get_deviation import get_deviation_bp
from flask_app.routes.little_zbparse import little_zbparse_bp
from flask_app.routes.upload import upload_bp
from flask_app.routes.test_zbparse import test_zbparse_bp


def create_app():
    app = Flask(__name__)
    # 设置日志的全局配置(如果需要)
    handler = logging.StreamHandler()
    handler.setFormatter(CSTFormatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    app.logger.addHandler(handler)
    app.logger.setLevel(logging.INFO)

    @app.before_request
    def before_request():
        """
        每个请求开始前初始化 logger 和 output_folder,
        根据请求的端点选择不同的子文件夹。
        """
        # 确定当前请求的端点
        blueprint = request.blueprint
        # 映射端点到子文件夹
        subfolder_map = {
            'get_deviation': 'output3',
            'little_zbparse': 'output2',
            'upload': 'output1',
            'test_zbparse': 'test_output'
        }
        # 获取对应的子文件夹,默认为 'output1'
        subfolder = subfolder_map.get(blueprint, 'output1')
        # 创建 logger 和 output_folder
        create_logger(app, subfolder)

    # 注册蓝图
    app.register_blueprint(get_deviation_bp)
    app.register_blueprint(little_zbparse_bp)
    app.register_blueprint(upload_bp)
    app.register_blueprint(test_zbparse_bp)
||||||
|
|
||||||
try:
|
|
||||||
parsed_data = json.loads(data)
|
|
||||||
except json.JSONDecodeError as e:
|
|
||||||
logger.error(f"Failed to decode JSON: {e}")
|
|
||||||
logger.error(f"Data received: {data}")
|
|
||||||
continue # Skip data if JSON parsing fails
|
|
||||||
|
|
||||||
if 'good_list' in parsed_data:
|
|
||||||
good_list = parsed_data['good_list']
|
|
||||||
logger.info("Collected good_list from the processing function: %s", good_list)
|
|
||||||
continue # Skip yielding good_list to the client
|
|
||||||
|
|
||||||
# 遍历 parsed_data 只提取内层内容进行合并
|
|
||||||
for outer_key, inner_dict in parsed_data.items():
|
|
||||||
if isinstance(inner_dict, dict):
|
|
||||||
combined_data.update(inner_dict)
|
|
||||||
# 日志记录已合并数据
|
|
||||||
|
|
||||||
# 每次数据更新后,流式返回当前进度
|
|
||||||
response = {
|
|
||||||
'message': 'Processing',
|
|
||||||
'filename': os.path.basename(downloaded_filepath),
|
|
||||||
'data': data
|
|
||||||
}
|
|
||||||
yield f"data: {json.dumps(response, ensure_ascii=False)}\n\n"
|
|
||||||
|
|
||||||
base_end_time = time.time()
|
|
||||||
logger.info(f"分段解析完成,耗时:{base_end_time - start_time:.2f} 秒")
|
|
||||||
|
|
||||||
|
|
||||||
# **保存 combined_data 到 output_folder 下的 'final_result.json'**
|
|
||||||
output_json_path = os.path.join(output_folder, 'final_result.json')
|
|
||||||
extracted_info_path = os.path.join(output_folder, 'extracted_result.json')
|
|
||||||
includes = ["基础信息", "资格审查", "商务评分", "技术评分", "无效标与废标项", "投标文件要求", "开评定标流程"]
|
|
||||||
final_result, extracted_info,tech_deviation,tech_star_deviation,business_deviation,business_star_deviation,zigefuhe_deviation = outer_post_processing(combined_data, includes, good_list)
|
|
||||||
|
|
||||||
tech_deviation_response, tech_deviation_star_response, zigefuhe_deviation_response, shangwu_deviation_response, shangwu_star_deviation_response = generate_deviation_response(
|
|
||||||
tech_deviation, tech_star_deviation, business_deviation, business_star_deviation, zigefuhe_deviation,
|
|
||||||
logger)
|
|
||||||
yield f"data: {json.dumps(tech_deviation_response, ensure_ascii=False)}\n\n"
|
|
||||||
yield f"data: {json.dumps(tech_deviation_star_response, ensure_ascii=False)}\n\n"
|
|
||||||
yield f"data: {json.dumps(zigefuhe_deviation_response, ensure_ascii=False)}\n\n"
|
|
||||||
yield f"data: {json.dumps(shangwu_deviation_response, ensure_ascii=False)}\n\n"
|
|
||||||
yield f"data: {json.dumps(shangwu_star_deviation_response, ensure_ascii=False)}\n\n"
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(extracted_info_path, 'w', encoding='utf-8') as json_file:
|
|
||||||
json.dump(extracted_info, json_file, ensure_ascii=False, indent=4)
|
|
||||||
logger.info(f"摘取后的数据已保存到 '{extracted_info_path}'")
|
|
||||||
except IOError as e:
|
|
||||||
logger.error(f"保存JSON文件时出错: {e}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(output_json_path, 'w', encoding='utf-8') as json_file:
|
|
||||||
json.dump(final_result, json_file, ensure_ascii=False, indent=4)
|
|
||||||
logger.info(f"合并后的数据已保存到 '{output_json_path}'")
|
|
||||||
except IOError as e:
|
|
||||||
logger.error(f"保存JSON文件时出错: {e}")
|
|
||||||
|
|
||||||
# 提取的数据
|
|
||||||
extracted_info_response = {
|
|
||||||
'message': 'extracted_info',
|
|
||||||
'filename': os.path.basename(downloaded_filepath),
|
|
||||||
'data': json.dumps(extracted_info, ensure_ascii=False)
|
|
||||||
}
|
|
||||||
yield f"data: {json.dumps(extracted_info_response, ensure_ascii=False)}\n\n"
|
|
||||||
|
|
||||||
# 最后发送合并后的完整数据
|
|
||||||
complete_response = {
|
|
||||||
'message': 'Combined_data',
|
|
||||||
'filename': os.path.basename(downloaded_filepath),
|
|
||||||
'data': json.dumps(final_result, ensure_ascii=False)
|
|
||||||
}
|
|
||||||
yield f"data: {json.dumps(complete_response, ensure_ascii=False)}\n\n"
|
|
||||||
|
|
||||||
# 发送最终响应
|
|
||||||
final_response = {
|
|
||||||
'message': 'File uploaded and processed successfully',
|
|
||||||
'filename': os.path.basename(downloaded_filepath),
|
|
||||||
'data': 'END'
|
|
||||||
}
|
|
||||||
|
|
||||||
yield f"data: {json.dumps(final_response)}\n\n"
|
|
||||||
|
|
||||||
finally:
|
|
||||||
end_time = time.time() # 记录结束时间
|
|
||||||
duration = end_time - start_time
|
|
||||||
logger.info(f"Total processing time: {duration:.2f} seconds")
|
|
||||||
|
|
||||||
|
|
||||||
@app.route('/api/test_zbparse', methods=['POST'])
|
|
||||||
def test_zbparse():
|
|
||||||
try:
|
|
||||||
return Response(stream_with_context(test_process_and_stream()), content_type='text/event-stream')
|
|
||||||
except Exception as e:
|
|
||||||
app.logger.error('Exception occurred: ' + str(e))
|
|
||||||
return jsonify({'error': str(e)}), 500
|
|
||||||
|
|
||||||
|
|
||||||
def test_process_and_stream():
|
|
||||||
# 模拟七段数据,每段包含指定的中文键名和更多详细数据
|
|
||||||
data_segments = [
|
|
||||||
{
|
|
||||||
"base_info": {
|
|
||||||
"基础信息": {
|
|
||||||
"project_name": "测试项目1",
|
|
||||||
"project_code": "TP001",
|
|
||||||
"project_manager": "张三",
|
|
||||||
"start_date": "2024-01-10",
|
|
||||||
"end_date": "2024-12-31"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"qualification_review": {
|
|
||||||
"资格审查": {
|
|
||||||
"review_criteria": ["公司资质", "过往业绩", "财务报表"],
|
|
||||||
"required_documents": ["营业执照", "资质证书", "近三年财务报告"],
|
|
||||||
"minimum_requirements": {
|
|
||||||
"company_age": "至少5年",
|
|
||||||
"past_projects": "至少3个大型项目"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"technical_standards": {
|
|
||||||
"技术标": {
|
|
||||||
"technical_requirements": ["设备质量要求", "施工工艺", "安全标准"],
|
|
||||||
"materials_list": ["钢筋", "水泥", "电缆"],
|
|
||||||
"equipment_specs": {
|
|
||||||
"excavator": "型号X123",
|
|
||||||
"concrete_mixer": "型号Y456"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"commercial_standards": {
|
|
||||||
"商务标": {
|
|
||||||
"pricing_method": "固定总价",
|
|
||||||
"payment_schedule": ["30%合同签订", "40%中期支付", "30%项目完成支付"],
|
|
||||||
"contract_conditions": {
|
|
||||||
"warranty_period": "2年",
|
|
||||||
"penalty_clauses": "延期每周罚款5%"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"invalid_requirements": {
|
|
||||||
"无效标与废标项": {
|
|
||||||
"common_issues": ["未按要求提交保证金", "技术标不达标"],
|
|
||||||
"invalidation_reasons": {
|
|
||||||
"missing_documents": "缺少必要文件",
|
|
||||||
"unqualified_technical_specs": "技术规格不合要求"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bidding_documents_requirements": {
|
|
||||||
"投标文件要求": {
|
|
||||||
"file_format": "PDF",
|
|
||||||
"submission_deadline": "2024-08-01 17:00",
|
|
||||||
"submission_location": "北京市某某大厦5楼",
|
|
||||||
"required_sections": ["公司简介", "技术方案", "商务报价"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"opening_bid": {
|
|
||||||
"开评定标流程": {
|
|
||||||
"bid_opening_time": "2024-09-01 10:00",
|
|
||||||
"location": "会议室A",
|
|
||||||
"evaluation_criteria": ["价格", "技术能力", "项目经验"],
|
|
||||||
"evaluation_process": {
|
|
||||||
"first_round": "资格审查",
|
|
||||||
"second_round": "技术评分",
|
|
||||||
"final_round": "商务报价评定"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
filename = "test_file.pdf"
|
|
||||||
|
|
||||||
for i, data in enumerate(data_segments, 1):
|
|
||||||
response = {
|
|
||||||
'message': f'Processing segment {i}',
|
|
||||||
'filename': filename,
|
|
||||||
'data': data
|
|
||||||
}
|
|
||||||
yield f"data: {json.dumps(response, ensure_ascii=False)}\n\n"
|
|
||||||
time.sleep(3) # 每隔5秒发送一段数据
|
|
||||||
|
|
||||||
# 在结束信号之前发送完整的数据
|
|
||||||
combined_data = {}
|
|
||||||
for segment in data_segments:
|
|
||||||
for outer_key, inner_dict in segment.items():
|
|
||||||
# 获取内层字典的第一个(也是唯一的)键值对
|
|
||||||
inner_key, inner_value = next(iter(inner_dict.items()))
|
|
||||||
combined_data[inner_key] = inner_value
|
|
||||||
    procurement_reqs = {
        "采购项目": "混凝土",
        "数量": "500立方米",
        "规格": "C30"
    }
    tech_deviation_response = {
        'message': 'procurement_reqs',
        'filename': filename,
        'data': json.dumps(procurement_reqs, ensure_ascii=False)
    }

    zige_deviation_table = {
        "资格性检查": ["具有独立承担民事责任的能力;", "具有良好的商业信誉和健全的财务会计制度;",
                  "具有履行合同所必需的设备和专业技术能力;", "有依法缴纳税收和社会保障资金的良好记录;",
                  "参加政府采购活动前三年内,在经营活动中没有重大违法记录;"]
    }
    fuhe_deviation_table = {
        "符合性检查": ["符合竞争性磋商文件第一部分竞争性磋商公告“六、其它补充事宜”第 1、2 条要求。",
                  "响应性文件签字和盖章齐全。", "满足磋商有效期。",
                  "供应商所提交的报价表未出现两个或两个以上不同报价。"]
    }
    shangwu_deviation_table = {
        "商务要求偏离表": ["交货期(工期):合同签订之日起 15 个日历天内完成,并通过项目验收。", "采购人指定地点",
                    "本项目报价须为固定总价,包含但不限于:采购、实施、调试、试运行、验收、运维等所有完成本项目相关的一切费用。",
                    "项目质保期为一年,服务内容包括项目所有硬件和软件,质保期自项目验收合格后起算。",
                    "中标人在合同约定时间内完成所供货物的安装集成并投入正常使用,经采购人组织验收(终验)合格后,按合同约定及相关规定支付货款。"]
    }
    shangwu_star_deviation_table = {
        "商务带星要求偏离表": ["交货期(工期):合同签订之日起 15 个日历天内完成,并通过项目验收。", "采购人指定地点",
                      "本项目报价须为固定总价,包含但不限于:采购、实施、调试、试运行、验收、运维等所有完成本项目相关的一切费用。",
                      "项目质保期为一年,服务内容包括项目所有硬件和软件,质保期自项目验收合格后起算。",
                      "中标人在合同约定时间内完成所供货物的安装集成并投入正常使用,经采购人组织验收(终验)合格后,按合同约定及相关规定支付货款。"]
    }
    jishu_star_deviation_table = {
        "高清数字枪机": ["6、▲补光距离:红外光最远可达 30 m,白光最远可达 30 m", "▲音频: 1个内置麦克风"],
        "高清数字半球机": ["▲音频:内置 1个麦克风", "▲供电方式: DC:12 V ± 25%,支持防反接保护; PoE: 802.3af,Class 3"],
        "网络硬盘录像机": ["▲可接入 1T、2T、3T、4T、6T、8T、10T、12TB、14TB、16TB、 18TB、20TB容量的 SATA接口硬盘;",
                    "▲样机可在预览界面随意选择一个或多个通道,在预警面板实 时展示此通道的目标抓拍信息,包括:事件名称、事件触发时间、 人脸抓图;针对人脸比对同时显示姓名、相似度;针对车辆报警 同时显示车牌;针对人体和车辆目标,可分别显示“人体”、“车 辆”。",
                    "▲具有存储安全保障功能,当存储压力过高或硬盘出现性能 不足时,可优先录像业务存储;"]
    }

    zigefuhe_deviation_response = {
        'message': 'zigefuhe_deviation',
        'filename': filename,
        'data': json.dumps(zige_deviation_table, ensure_ascii=False)
    }
    shangwu_deviation_response = {
        'message': 'shangwu_deviation',
        'filename': filename,
        'data': json.dumps(shangwu_deviation_table, ensure_ascii=False)
    }
    shangwu_star_deviation_response = {
        'message': 'shangwu_star_deviation',
        'filename': filename,
        'data': json.dumps(shangwu_star_deviation_table, ensure_ascii=False)
    }
    jishu_star_deviation_response = {
        'message': 'jishu_star_deviation',
        'filename': filename,
        'data': json.dumps(jishu_star_deviation_table, ensure_ascii=False)
    }

    yield f"data:{json.dumps(zigefuhe_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data:{json.dumps(shangwu_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data:{json.dumps(shangwu_star_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data:{json.dumps(jishu_star_deviation_response, ensure_ascii=False)}\n\n"
    yield f"data: {json.dumps(tech_deviation_response, ensure_ascii=False)}\n\n"

    # Send the full combined dictionary
    complete_response = {
        'message': 'Combined data',
        'filename': filename,
        'data': combined_data
    }
    yield f"data: {json.dumps(complete_response, ensure_ascii=False)}\n\n"

    # Send the end-of-stream signal
    final_response = {
        'message': 'File processed successfully',
        'filename': filename,
        'data': 'END'
    }
    yield f"data: {json.dumps(final_response, ensure_ascii=False)}\n\n"

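# Illustrative sketch (not part of the original commit): a minimal client for the SSE stream
# produced above. The endpoint URL is an assumption; the generator is presumed to be wrapped in
# Response(stream_with_context(...), mimetype='text/event-stream'). Each event is one
# "data: <json>\n\n" frame, and the stream ends once the payload's 'data' equals 'END'.
# import requests, json
#
# def consume_deviation_stream(url="http://127.0.0.1:5000/get_deviation"):  # hypothetical URL
#     with requests.get(url, stream=True) as resp:
#         for line in resp.iter_lines(decode_unicode=True):
#             if not line or not line.startswith("data:"):
#                 continue  # skip the blank separator lines between frames
#             payload = json.loads(line[len("data:"):].strip())
#             print(payload['message'], payload['filename'])
#             if payload.get('data') == 'END':
#                 break
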
    # @app.route('/get_json', methods=['POST'])
    # def testjson():
    #     final_json_path="C:\\Users\\Administrator\\Desktop\\fsdownload\\temp4\\fd55f067-2cf6-475c-b7ce-4498f6606bf6\\final_result.json"
    #     with open(final_json_path, 'r', encoding='utf-8') as f:
    #         print('final_json_path:'+final_json_path)
    #         zbparse_data = json.load(f)
    #         json_str = json.dumps(zbparse_data, ensure_ascii=False)
    #         print(json_str)
    #         return jsonify({
    #             'message': 'File uploaded and processed successfully',
    #             'filename': os.path.basename(final_json_path),
    #             'data': json_str
    #         })

    def remove_directory(path):
        logger = g.logger
        try:
            shutil.rmtree(path)
            logger.info(f"Successfully removed directory: {path}")  # logged via the request-scoped logger
        except Exception as e:
            logger.error(f"Failed to remove directory {path}: {str(e)}")  # logged via the request-scoped logger

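    # Illustrative sketch (not part of the original commit): one way remove_directory could be
    # triggered automatically, assuming g.output_folder holds the per-request output directory
    # and that cleanup after every request is actually wanted; the real wiring is not shown in
    # this excerpt.
    # @app.teardown_request
    # def _cleanup_output_folder(exception=None):
    #     folder = g.get('output_folder', None)
    #     if folder:
    #         remove_directory(folder)
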
    return app


# TODO: 培训要求 / 总体要求 / 进度要求 / '建设要求' currently end up under 技术要求; reclassify them into 其他要求
# TODO: add request queueing to the API endpoints (see the commented sketch below)
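# Illustrative sketch (not part of the original commit) for the queueing TODO above: a
# module-level semaphore that lets at most MAX_CONCURRENT requests run the heavy parsing at a
# time while the others wait their turn. The names and the limit are assumptions.
# import threading
# from functools import wraps
#
# MAX_CONCURRENT = 1
# _parse_slot = threading.BoundedSemaphore(MAX_CONCURRENT)
#
# def queued(view_func):
#     @wraps(view_func)
#     def wrapper(*args, **kwargs):
#         with _parse_slot:  # blocks until a slot is free
#             return view_func(*args, **kwargs)
#     return wrapper
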
if __name__ == '__main__':
    app = create_app()
    app.run(debug=True, host='0.0.0.0', port=5000)

@ -109,7 +109,7 @@ def generate_user_query_template(required_keys,processed_filepath):
    import json

    # Define all possible keys
-   all_possible_keys = ["技术要求", "服务要求", "商务要求", "其他要求", "技术、服务要求"]
+   all_possible_keys = ["技术要求", "服务要求", "商务要求", "其他要求", "技术、服务要求","总体要求","进度要求","培训要求"]

    # Example content for each key
    example_content1 = {
@ -210,9 +210,45 @@ def generate_user_query_template(required_keys,processed_filepath):
    # 文本内容:{full_text}
    return user_query_template

def merge_requirements(input_dict):
    # Normalize the incoming keys into a temporary dict: strip any whitespace inside the key
    # names so that e.g. "技 术 要 求" and "技术要求" are treated as the same key.
    temp_dict = {"".join(str(k).split()): str(v) for k, v in (input_dict or {}).items()}
    # The final dict only ever contains these four keys.
    final_keys = ['技术要求', '商务要求', '服务要求', '其他要求']
    final_dict = {key: "" for key in final_keys}

    # If the input already carries '其他要求', keep its content.
    if '其他要求' in temp_dict and temp_dict['其他要求'].strip():
        final_dict['其他要求'] = temp_dict['其他要求'].strip()

    # Copy '技术要求', '商务要求', '服务要求' straight through.
    for key in ['技术要求', '商务要求', '服务要求']:
        if key in temp_dict:
            final_dict[key] = temp_dict[key].strip()

    # Collect the contents that have to be folded into '其他要求'.
    merge_keys = ['总体要求', '进度要求', '培训要求']
    merged_contents = []
    for key in merge_keys:
        if key in temp_dict and temp_dict[key].strip():
            merged_contents.append(temp_dict[key].strip())

    # Append the collected content, if any.
    if merged_contents:
        merged_text = " ".join(merged_contents)
        if final_dict['其他要求']:
            final_dict['其他要求'] += " " + merged_text
        else:
            final_dict['其他要求'] = merged_text

    # Remove redundant whitespace.
    for key in final_dict:
        final_dict[key] = final_dict[key].strip()

    return final_dict

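# Illustrative usage (not part of the original commit), doctest-style, based on the merge
# logic above; keys outside the four canonical ones are folded into '其他要求':
# >>> merge_requirements({"技术要求": "满足招标参数", "进度要求": "30天内交付", "培训要求": "提供现场培训"})
# {'技术要求': '满足招标参数', '商务要求': '', '服务要求': '', '其他要求': '30天内交付 提供现场培训'}
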
def get_business_requirements(procurement_path,processed_filepath):
    file_id=upload_file(procurement_path)
-   required_keys = ["技\s*术\s*要\s*求","商\s*务\s*要\s*求", "服\s*务\s*要\s*求", "其\s*他\s*要\s*求"]
+   required_keys = ["技\s*术\s*要\s*求","商\s*务\s*要\s*求", "服\s*务\s*要\s*求", "其\s*他\s*要\s*求","总\s*体\s*要\s*求","进\s*度\s*要\s*求","培\s*训\s*要\s*求"]
    contained_keys=find_exists(procurement_path,required_keys)
    print(contained_keys)
    # queries = generate_queries(truncate_file, contained_keys)

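# Illustrative sketch (not part of the original commit): the \s* in the patterns above makes the
# section headings tolerant of stray whitespace that text extraction tends to insert between CJK
# characters. find_exists is not shown in this excerpt, but the patterns are presumably applied
# with re.search, e.g.:
# >>> import re
# >>> bool(re.search("技\s*术\s*要\s*求", "第五章 技 术 要 求"))
# True
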
@ -827,7 +827,7 @@ def truncate_pdf_specific_goods(pdf_path, output_folder, selections,unique_id="1

    return truncate_files

-# TODO: the 交通智能系统 and 招标(1)(1) files are problematic (包头, 绍兴); the qualification-review file may not need to default to the same chapter as "evaluation"; invalid bids may also need to handle the "more" case, as in engineering bids
+# TODO: the 交通智能系统 and 招标(1)(1) files are problematic (包头, 绍兴); in engineering bids, once a section is judged to be the conformity review (符合性审查) it can be placed in the same chapter
# ztbfile.pdf is missing the qualification review; 包头 is missing the conformity review
if __name__ == "__main__":
@ -174,7 +174,6 @@ def process_folder(input_folder, output_folder):
        except ValueError as e:
            print(f"Error processing {file_name}: {e}")

-# TODO: in the 投标人须知正文 section the numbering can be out of order; for now a new number is only accepted if it is larger than the previous one, otherwise the line is treated as body text of the previous item. The numbering in 2-招标文件 is simply chaotic; no fix for that.
# TODO: 招标文件111_tobidders_notice_part2.pdf 陕西省公安厅交通警察总队高速公路交通安全智能感知巡查系统项目(1)_tobidders_notice_part2.pdf
if __name__ == "__main__":
    # file_path = 'D:\\flask_project\\flask_app\\static\\output\\cfd4959d-5ea9-4112-8b50-9e543803f029\\ztbfile_tobidders_notice.pdf'
@ -239,9 +239,7 @@ def goods_bid_main(output_folder, file_path, file_type, unique_id):
|
|||||||
|
|
||||||
#TODO:把所有未知都删掉。
|
#TODO:把所有未知都删掉。
|
||||||
#TODO:考虑把解析失败的调用豆包,全文上传。
|
#TODO:考虑把解析失败的调用豆包,全文上传。
|
||||||
#TODO:写个脚本确保技术参数没有嵌套
|
|
||||||
|
|
||||||
#TODO: start up 结构优化
|
|
||||||
#TODO:同系统下的多个货物,记录一下数量
|
#TODO:同系统下的多个货物,记录一下数量
|
||||||
#TODO:设备前面带星,而不是要求前面带星。
|
#TODO:设备前面带星,而不是要求前面带星。
|
||||||
#商务标这里改为列表最里层
|
#商务标这里改为列表最里层
|
||||||
|