From 017f398cb111261b5200574ed8b9be1bd06608cf Mon Sep 17 00:00:00 2001
From: linyq
Date: Thu, 8 May 2025 11:23:34 +0800
Subject: [PATCH] Add timestamped filenames in generate_script_docu.py and
 improve how analysis results are saved: results are now written as JSON to
 a dedicated directory and the save path is logged; the old plain-text
 output logic is removed to simplify the code structure.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 webui/tools/generate_script_docu.py | 32 ++++++++++++++++++--------------
 1 file changed, 18 insertions(+), 14 deletions(-)

diff --git a/webui/tools/generate_script_docu.py b/webui/tools/generate_script_docu.py
index 21abcab..60ef2c0 100644
--- a/webui/tools/generate_script_docu.py
+++ b/webui/tools/generate_script_docu.py
@@ -9,6 +9,7 @@ from app.utils import video_processor
 import streamlit as st
 from loguru import logger
 from requests.adapters import HTTPAdapter
+from datetime import datetime
 
 from app.config import config
 from app.utils.script_generator import ScriptProcessor
@@ -164,6 +165,9 @@ def generate_script_docu(params):
         )
         loop.close()
 
+        """
+        3. 处理分析结果(格式化为 json 数据)
+        """
         # ===================处理分析结果===================
         update_progress(60, "正在整理分析结果...")
 
@@ -282,24 +286,24 @@ def generate_script_docu(params):
             "overall_activity_summaries": overall_activity_summaries
         }
 
+        # 使用当前时间创建文件名
+        now = datetime.now()
+        timestamp_str = now.strftime("%Y%m%d_%H%M")
+
+        # 确保分析目录存在
+        analysis_dir = os.path.join(utils.storage_dir(), "temp", "analysis")
+        os.makedirs(analysis_dir, exist_ok=True)
+
         # 保存完整的分析结果为JSON
-        analysis_json_path = os.path.join(utils.task_dir(), "frame_analysis.json")
+        analysis_filename = f"frame_analysis_{timestamp_str}.json"
+        analysis_json_path = os.path.join(analysis_dir, analysis_filename)
         with open(analysis_json_path, 'w', encoding='utf-8') as f:
             json.dump(merged_results, f, ensure_ascii=False, indent=2)
-
-        # 同时保存原始文本格式的分析结果(兼容性)
-        if not frame_analysis.strip() and merged_frame_observations:
-            # 如果没有原始文本但有合并结果,则从合并结果生成文本
-            frame_analysis = json.dumps(merged_results, ensure_ascii=False, indent=2)
-
-        if not frame_analysis.strip():
-            raise Exception("未能生成有效的帧分析结果")
-
-        # # 保存文本格式分析结果
-        # analysis_path = os.path.join(utils.temp_dir(), "frame_analysis.txt")
-        # with open(analysis_path, 'w', encoding='utf-8') as f:
-        #     f.write(frame_analysis)
+        logger.info(f"分析结果已保存到: {analysis_json_path}")
 
+        """
+        4. 生成文案
+        """
         update_progress(70, "正在生成脚本...")
 
         # 从配置中获取文本生成相关配置
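
For quick verification, the new saving path can be exercised outside the Streamlit app. The Python sketch below is a minimal approximation of the logic the patch adds, under stated assumptions: a plain storage_dir argument stands in for the project's utils.storage_dir(), and demo_results is a placeholder for the real merged_results dictionary. The timestamped filename, the temp/analysis directory, the UTF-8 JSON dump, and the loguru log line mirror the diff above.

# Illustrative sketch, not part of the patch.
# Assumptions: `storage_dir` replaces utils.storage_dir(); `demo_results`
# stands in for the merged frame-analysis results built earlier in the script.
import json
import os
from datetime import datetime

from loguru import logger


def save_analysis_results(merged_results: dict, storage_dir: str) -> str:
    # Timestamped filename, e.g. frame_analysis_20250508_1123.json
    timestamp_str = datetime.now().strftime("%Y%m%d_%H%M")

    # Ensure <storage>/temp/analysis exists, matching the patched layout
    analysis_dir = os.path.join(storage_dir, "temp", "analysis")
    os.makedirs(analysis_dir, exist_ok=True)

    # Write the merged results as UTF-8 JSON and log the destination
    analysis_json_path = os.path.join(analysis_dir, f"frame_analysis_{timestamp_str}.json")
    with open(analysis_json_path, "w", encoding="utf-8") as f:
        json.dump(merged_results, f, ensure_ascii=False, indent=2)

    logger.info(f"Analysis results saved to: {analysis_json_path}")
    return analysis_json_path


if __name__ == "__main__":
    demo_results = {"total_frames_analyzed": 0, "frame_observations": []}
    save_analysis_results(demo_results, storage_dir="./storage")

Using a timestamp in the filename keeps earlier analysis runs from being overwritten, which appears to be the motivation for moving away from the fixed frame_analysis.json under utils.task_dir().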