Add logging information (增加日志信息)

parent 50c83b0b11
commit bbe7ae0ce9
@@ -14,7 +14,7 @@ git clone地址:http://47.98.59.178:3000/zy123/zbparse.git
 ## 项目结构:
 .env存放一些密钥(大模型、textin等),它是gitignore忽略了,因此在服务器上git pull项目的时候,这个文件不会更新(因为密钥比较重要),需要手动维护服务器相应位置的.env。
@@ -22,7 +22,7 @@ git clone地址:http://47.98.59.178:3000/zy123/zbparse.git
 1. 进入项目文件夹
 **注意:**需要确认.env是否存在在服务器,默认是隐藏的
 输入cat .env
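The README hunk above reminds deployers that the server-side .env is ignored by git and must be maintained by hand. As a quick way to confirm it is present and see which keys it defines, here is a minimal, hypothetical sketch (the project-root path and the KEY=VALUE layout of .env are assumptions, not taken from the repository):

```python
# check_env.py - hypothetical helper, not part of the zbparse repository
import os

env_path = os.path.join(os.getcwd(), ".env")  # assumes .env sits in the project root

if os.path.exists(env_path):
    with open(env_path, encoding="utf-8") as f:
        # keep only the key names, never print the secret values
        keys = [line.split("=", 1)[0].strip()
                for line in f
                if "=" in line and not line.lstrip().startswith("#")]
    print("found .env with keys:", keys)
else:
    print(".env is missing - it is not tracked by git and must be copied to the server manually")
```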
@@ -1,6 +1,6 @@
 # flask_app/routes/upload.py
 import multiprocessing
+import urllib.parse
 from flask import Blueprint, request, jsonify,g
 import json
 import os
@@ -57,6 +57,8 @@ def zbparse(): #大解析
     logger.info("大解析开始!!!")
     received_data = request.get_json()
     logger.info("Received JSON data: " + str(received_data))
     file_url = g.file_url
     zb_type = g.zb_type
+    file_name = urllib.parse.unquote(file_url).split('/')[-1]
+    logger.info(f"Starting parsing file: {file_name}")
     try:
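The two added lines derive a human-readable file name from the request's file_url by URL-decoding it and taking the last path segment. A minimal sketch of that behaviour, using a made-up URL (the URL and its percent-encoded Chinese file name are illustrative, not from the project):

```python
import urllib.parse

# hypothetical download URL with a percent-encoded Chinese file name
file_url = "http://example.com/files/%E6%8B%9B%E6%A0%87%E6%96%87%E4%BB%B6.pdf"

# same expression as in the diff: decode the URL, then keep the last path segment
file_name = urllib.parse.unquote(file_url).split('/')[-1]
print(file_name)  # -> 招标文件.pdf
```

Note that the decoded name is only used for logging and error records; the download itself still uses the original file_url.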
@@ -65,7 +67,7 @@ def zbparse(): #大解析
         except Exception as e:
             logger.error('Exception occurred: ' + str(e))
             if hasattr(g, 'unique_id'):
-                log_error_unique_id(g.unique_id,1)
+                log_error_unique_id(g.unique_id,1,file_name=file_name)
             error_response = create_response(
                 message='处理文件时发生异常',
                 status='error',
@@ -75,7 +77,7 @@ def zbparse(): #大解析
     except Exception as e:
         logger.error('Unexpected exception: ' + str(e))
         if hasattr(g, 'unique_id'):
-            log_error_unique_id(g.unique_id,1)
+            log_error_unique_id(g.unique_id,1,file_name=file_name)
         error_response = create_response(
             message='内部服务器错误',
             status='error',
@@ -92,12 +94,12 @@ def process_and_stream(file_url, zb_type):
     filename = "ztbfile"
     downloaded_filename = os.path.join(output_folder, filename)
     start_time = time.time()
+    file_name = urllib.parse.unquote(file_url).split('/')[-1]
     try:
         downloaded_filepath, file_type = download_file(file_url, downloaded_filename, True)
         if not downloaded_filepath or file_type == 4:
             logger.error("下载文件失败或不支持的文件类型")
-            log_error_unique_id(unique_id, 1)
+            log_error_unique_id(unique_id, 1, file_name=file_name)
             error_response = create_response(
                 message='下载文件失败或不支持的文件类型',
                 status='error',
@@ -185,7 +187,7 @@ def process_and_stream(file_url, zb_type):
             logger.info(f"摘取后的数据已保存到 '{extracted_info_path}'")
         except IOError as e:
             logger.error(f"保存JSON文件时出错: {e}")
-            log_error_unique_id(unique_id,1) # 记录失败的 unique_id
+            log_error_unique_id(unique_id,1,file_name=file_name) # 记录失败的 unique_id
 
         try:
             with open(output_json_path, 'w', encoding='utf-8') as json_file:
@@ -193,7 +195,7 @@ def process_and_stream(file_url, zb_type):
             logger.info(f"合并后的数据已保存到 '{output_json_path}'")
         except IOError as e:
             logger.error(f"保存JSON文件时出错: {e}")
-            log_error_unique_id(unique_id,1) # 记录失败的 unique_id
+            log_error_unique_id(unique_id,1,file_name=file_name) # 记录失败的 unique_id
 
         extracted_info_response = create_response(
             message='extracted_info',
@@ -218,7 +220,7 @@ def process_and_stream(file_url, zb_type):
 
     except Exception as e:
         logger.error(f"Unexpected error in process_and_stream: {e}")
-        log_error_unique_id(unique_id,1) # 记录失败的 unique_id
+        log_error_unique_id(unique_id,1,file_name=file_name) # 记录失败的 unique_id
         error_response = create_response(
             message='内部服务器错误',
             status='error',

@@ -2,6 +2,7 @@
 import json
 import os
 from functools import wraps
+from datetime import datetime
 from flask import request, jsonify, current_app, g
 from flask_app.general.llm.清除file_id import read_file_ids, delete_file_by_ids
 from flask_app.logger_setup import create_logger
@@ -188,7 +189,7 @@ def sse_format(response):
     return f"data: {json.dumps(response, ensure_ascii=False)}\n\n"
 
 
-def log_error_unique_id(unique_id, selection=6):
+def log_error_unique_id(unique_id, selection=6, file_name="x"):
     """
     记录失败调用的 unique_id 到对应的错误日志文件中。
 
@@ -211,7 +212,8 @@ def log_error_unique_id(unique_id, selection=6):
     # 获取对应的错误文件名,如果 selection 不在映射中,则使用默认文件名
     error_file_name = error_file_map.get(selection, 'general_error.txt')
     error_file_path = os.path.join(error_dir, error_file_name)
+    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
 
     # 将 unique_id 写入错误文件
     with open(error_file_path, 'a', encoding='utf-8') as f:
-        f.write(f"{unique_id}\n")
+        f.write(f"{current_time} - {file_name} - {unique_id}\n")
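Taken together, the changes above make every failed call append a line of the form "<timestamp> - <file name> - <unique_id>" to the selected error file instead of the bare unique_id. A minimal, self-contained sketch of just that write path (the default log path and the sample values are made up for illustration; the real function also maps selection to a per-module error file):

```python
from datetime import datetime

def write_error_record(unique_id: str, file_name: str = "x",
                       error_file_path: str = "general_error.txt") -> None:
    """Append one '<timestamp> - <file name> - <unique_id>' line, mirroring the new log format."""
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    with open(error_file_path, 'a', encoding='utf-8') as f:
        f.write(f"{current_time} - {file_name} - {unique_id}\n")

# hypothetical call, e.g. after a download failure:
# write_error_record("0e3f2a", file_name="招标文件.pdf")
# would append something like: 2025-01-01 12:00:00 - 招标文件.pdf - 0e3f2a
```

The default file_name="x" matches the new signature, so call sites that pass no file name still produce a well-formed line.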