NarratoAI/app/config/defaults.py
viccy 99dd4193ae feat(字幕): 新增阿里百炼 Fun-ASR 音视频字幕转录功能
- 在 WebUI 中增加 Fun-ASR 转录界面,支持上传多种音视频格式并生成 SRT 字幕
- 新增 `app/services/fun_asr_subtitle.py` 服务模块,实现完整的 REST API 调用流程,包括获取上传凭证、文件上传、提交任务、轮询结果和 SRT 格式转换
- 在配置文件中增加 `[fun_asr]` 配置段,支持保存 API Key
- 添加完整的单元测试,覆盖核心转换逻辑和服务流程
- 为兼容 Python 3.11 以下版本,将 `tomllib` 导入改为尝试导入并回退到 `tomli`
- 在 `defaults.py` 中添加 `from __future__ import annotations` 以支持类型注解
2026-04-27 18:15:54 +08:00

64 lines
2.3 KiB
Python

"""Shared config defaults used by both bootstrap and WebUI fallbacks."""
from __future__ import annotations
# Endpoint and provider id shared by both the vision and text LLM defaults.
DEFAULT_OPENAI_COMPATIBLE_BASE_URL = "https://api.siliconflow.cn/v1"
DEFAULT_OPENAI_COMPATIBLE_PROVIDER = "openai"

# Vision-model defaults.
DEFAULT_VISION_LLM_PROVIDER = DEFAULT_OPENAI_COMPATIBLE_PROVIDER
DEFAULT_VISION_OPENAI_MODEL_NAME = "Qwen/Qwen3.5-122B-A10B"

# Text-model defaults.
DEFAULT_TEXT_LLM_PROVIDER = DEFAULT_OPENAI_COMPATIBLE_PROVIDER
DEFAULT_TEXT_OPENAI_MODEL_NAME = "Pro/zai-org/GLM-5"

# Flat key/value defaults written into the app config. API keys deliberately
# default to the empty string so users must supply their own.
DEFAULT_LLM_APP_CONFIG = dict(
    vision_llm_provider=DEFAULT_VISION_LLM_PROVIDER,
    vision_openai_model_name=DEFAULT_VISION_OPENAI_MODEL_NAME,
    vision_openai_api_key="",
    vision_openai_base_url=DEFAULT_OPENAI_COMPATIBLE_BASE_URL,
    text_llm_provider=DEFAULT_TEXT_LLM_PROVIDER,
    text_openai_model_name=DEFAULT_TEXT_OPENAI_MODEL_NAME,
    text_openai_api_key="",
    text_openai_base_url=DEFAULT_OPENAI_COMPATIBLE_BASE_URL,
)
def build_default_app_config(app_config: dict | None = None) -> dict:
    """Return a copy of *app_config* with the shared LLM defaults forced on.

    Any keys that collide with ``DEFAULT_LLM_APP_CONFIG`` are overwritten;
    all other saved values are preserved. The input mapping is not mutated.
    """
    result = {} if app_config is None else dict(app_config)
    for key, value in DEFAULT_LLM_APP_CONFIG.items():
        result[key] = value
    return result
def merge_missing_app_defaults(app_config: dict | None = None) -> dict:
    """Return a copy of *app_config* with absent default keys backfilled.

    Saved user values always win: a key already present in the config is
    left untouched even if its value is falsy. The input mapping is not
    mutated.
    """
    result = {} if app_config is None else dict(app_config)
    for key, default in DEFAULT_LLM_APP_CONFIG.items():
        if key not in result:
            result[key] = default
    return result
def normalize_openai_compatible_model_name(
    model_name: str,
    provider: str = DEFAULT_OPENAI_COMPATIBLE_PROVIDER,
) -> str:
    """Strip only the internal OpenAI-compatible provider prefix if present.

    The prefix match is case-insensitive. Bug fix: previously only the model
    name was lowercased before the comparison, so a *provider* containing
    uppercase characters (e.g. ``"OpenAI"``) could never match its own
    ``"<provider>/"`` prefix; the prefix is now lowercased as well.

    Args:
        model_name: Raw model identifier, possibly ``"<provider>/<model>"``.
            ``None`` or empty input yields ``""``.
        provider: Provider id whose leading ``"<provider>/"`` is removed.

    Returns:
        The whitespace-stripped model name with the provider prefix removed,
        or the stripped input unchanged when the prefix is absent.
    """
    normalized = (model_name or "").strip()
    provider_prefix = f"{provider}/".lower()
    if normalized.lower().startswith(provider_prefix):
        return normalized[len(provider_prefix):]
    return normalized
def get_openai_compatible_ui_values(
    full_model_name: str,
    default_model: str,
    provider: str = DEFAULT_OPENAI_COMPATIBLE_PROVIDER,
) -> tuple[str, str]:
    """Keep the UI provider fixed while preserving the full model identifier.

    Falls back to *default_model* when *full_model_name* is empty, then
    strips the internal provider prefix. If normalization yields an empty
    string, *default_model* is returned as the model value.
    """
    candidate = full_model_name if full_model_name else default_model
    model = normalize_openai_compatible_model_name(candidate, provider=provider)
    if not model:
        model = default_model
    return provider, model