NarratoAI/app/config/defaults.py
linyq 16dbbf3461 refactor(config): 重构配置系统以支持默认值和模型名称规范化
- 新增 defaults.py 提供共享默认配置和模型名称处理工具
- 重构 config.py 使用默认值填充缺失配置
- 修改 openai_compatible_provider.py 简化模型名称处理逻辑
- 更新 WebUI 组件使用新的默认值系统
- 添加测试用例验证配置引导和模型名称处理
2026-03-28 00:34:01 +08:00

62 lines
2.2 KiB
Python

"""Shared config defaults used by both bootstrap and WebUI fallbacks."""
# Base URL of the default OpenAI-compatible endpoint (SiliconFlow).
DEFAULT_OPENAI_COMPATIBLE_BASE_URL = "https://api.siliconflow.cn/v1"
# Internal provider identifier used for all OpenAI-compatible endpoints.
DEFAULT_OPENAI_COMPATIBLE_PROVIDER = "openai"
# Defaults for the vision (frame-analysis) LLM.
DEFAULT_VISION_LLM_PROVIDER = DEFAULT_OPENAI_COMPATIBLE_PROVIDER
DEFAULT_VISION_OPENAI_MODEL_NAME = "Qwen/Qwen3.5-122B-A10B"
# Defaults for the text-generation LLM.
DEFAULT_TEXT_LLM_PROVIDER = DEFAULT_OPENAI_COMPATIBLE_PROVIDER
DEFAULT_TEXT_OPENAI_MODEL_NAME = "Pro/zai-org/GLM-5"
# Canonical key/value defaults merged into the app config by the helpers
# below. API keys are deliberately empty: the user must supply their own.
DEFAULT_LLM_APP_CONFIG = {
"vision_llm_provider": DEFAULT_VISION_LLM_PROVIDER,
"vision_openai_model_name": DEFAULT_VISION_OPENAI_MODEL_NAME,
"vision_openai_api_key": "",
"vision_openai_base_url": DEFAULT_OPENAI_COMPATIBLE_BASE_URL,
"text_llm_provider": DEFAULT_TEXT_LLM_PROVIDER,
"text_openai_model_name": DEFAULT_TEXT_OPENAI_MODEL_NAME,
"text_openai_api_key": "",
"text_openai_base_url": DEFAULT_OPENAI_COMPATIBLE_BASE_URL,
}
def build_default_app_config(app_config: dict | None = None) -> dict:
    """Return a new dict with the shared LLM defaults forced in.

    Any keys already present in *app_config* that collide with
    ``DEFAULT_LLM_APP_CONFIG`` are overwritten; unrelated keys survive.
    The input mapping is never mutated.
    """
    base = {} if app_config is None else app_config
    # Defaults come last so they win every key collision.
    return {**base, **DEFAULT_LLM_APP_CONFIG}
def merge_missing_app_defaults(app_config: dict | None = None) -> dict:
    """Return a copy of *app_config* with absent default keys filled in.

    Unlike :func:`build_default_app_config`, saved user values always win;
    only keys missing from the input receive their shared default. The
    input mapping is never mutated.
    """
    result = dict(app_config or {})
    # Collect only the defaults whose keys the user has not set yet.
    absent = {
        key: value
        for key, value in DEFAULT_LLM_APP_CONFIG.items()
        if key not in result
    }
    result.update(absent)
    return result
def normalize_openai_compatible_model_name(
    model_name: str,
    provider: str = DEFAULT_OPENAI_COMPATIBLE_PROVIDER,
) -> str:
    """Strip only the internal OpenAI-compatible provider prefix if present.

    The check is case-insensitive (e.g. ``"OpenAI/gpt-4o"`` with provider
    ``"openai"`` yields ``"gpt-4o"``), but the remainder of the model name
    keeps its original casing. Names without the prefix pass through
    unchanged; ``None``/empty input yields ``""``.
    """
    normalized = (model_name or "").strip()
    # BUG FIX: the prefix must be lowercased too — previously a provider
    # containing uppercase characters could never match the lowercased
    # model name, so the prefix was silently left in place.
    provider_prefix = f"{provider}/".lower()
    if normalized.lower().startswith(provider_prefix):
        return normalized[len(provider_prefix):]
    return normalized
def get_openai_compatible_ui_values(
    full_model_name: str,
    default_model: str,
    provider: str = DEFAULT_OPENAI_COMPATIBLE_PROVIDER,
) -> tuple[str, str]:
    """Return the (provider, model) pair shown in the WebUI.

    The provider slot is pinned to *provider*; the model slot is the saved
    model name (provider prefix stripped), falling back to *default_model*
    when nothing usable is stored.
    """
    # Prefer the saved value; an empty/None name falls back to the default.
    candidate = full_model_name if full_model_name else default_model
    stripped = normalize_openai_compatible_model_name(candidate, provider=provider)
    if not stripped:
        stripped = default_model
    return provider, stripped