refactor(config): thread app_config through create_chat_model and get_available_tools

Phase 2 P2-7 (partial, Categories E+F): the two highest-traffic factory
functions accept an optional app_config keyword-only parameter. When make_lead_agent
is called with explicit app_config (client / Gateway path), config flows
through to the model factory and tool registration without the functions
consulting AppConfig.current().

Fallback path (app_config=None → AppConfig.current()) preserved so LangGraph
Server registration and legacy tests keep working. The remaining community tool
factories, the memory subsystem, and the skills/sandbox helpers need similar
mechanical migrations but were out of scope for this change.
This commit is contained in:
greatmengqi 2026-04-17 00:35:55 +08:00
parent 6f5226a538
commit 6e3350e315
3 changed files with 20 additions and 8 deletions

View File

@ -354,8 +354,8 @@ def make_lead_agent(
if is_bootstrap:
# Special bootstrap agent with minimal prompt for initial custom agent creation flow
return create_agent(
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled),
tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled) + [setup_agent],
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled, app_config=app_config),
tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled, app_config=app_config) + [setup_agent],
middleware=_build_middlewares(app_config, config, model_name=model_name),
system_prompt=apply_prompt_template(subagent_enabled=subagent_enabled, max_concurrent_subagents=max_concurrent_subagents, available_skills=set(["bootstrap"])),
state_schema=ThreadState,
@ -364,8 +364,8 @@ def make_lead_agent(
# Default lead agent (unchanged behavior)
return create_agent(
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled, reasoning_effort=reasoning_effort),
tools=get_available_tools(model_name=model_name, groups=agent_config.tool_groups if agent_config else None, subagent_enabled=subagent_enabled),
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled, reasoning_effort=reasoning_effort, app_config=app_config),
tools=get_available_tools(model_name=model_name, groups=agent_config.tool_groups if agent_config else None, subagent_enabled=subagent_enabled, app_config=app_config),
middleware=_build_middlewares(app_config, config, model_name=model_name, agent_name=agent_name),
system_prompt=apply_prompt_template(
subagent_enabled=subagent_enabled, max_concurrent_subagents=max_concurrent_subagents, agent_name=agent_name, available_skills=set(agent_config.skills) if agent_config and agent_config.skills is not None else None

View File

@ -30,16 +30,24 @@ def _vllm_disable_chat_template_kwargs(chat_template_kwargs: dict) -> dict:
return disable_kwargs
def create_chat_model(name: str | None = None, thinking_enabled: bool = False, **kwargs) -> BaseChatModel:
def create_chat_model(
name: str | None = None,
thinking_enabled: bool = False,
*,
app_config: "AppConfig | None" = None,
**kwargs,
) -> BaseChatModel:
"""Create a chat model instance from the config.
Args:
name: The name of the model to create. If None, the first model in the config will be used.
app_config: Application config. Falls back to AppConfig.current() when
omitted; new callers should pass this explicitly.
Returns:
A chat model instance.
"""
config = AppConfig.current()
config = app_config if app_config is not None else AppConfig.current()
if name is None:
name = config.models[0].name
model_config = config.get_model_config(name)

View File

@ -37,6 +37,8 @@ def get_available_tools(
include_mcp: bool = True,
model_name: str | None = None,
subagent_enabled: bool = False,
*,
app_config: AppConfig | None = None,
) -> list[BaseTool]:
"""Get all available tools from config.
@ -48,11 +50,13 @@ def get_available_tools(
include_mcp: Whether to include tools from MCP servers (default: True).
model_name: Optional model name to determine if vision tools should be included.
subagent_enabled: Whether to include subagent tools (task, task_status).
app_config: Explicit application config. Falls back to AppConfig.current()
when omitted; new callers should pass this explicitly.
Returns:
List of available tools.
"""
config = AppConfig.current()
config = app_config if app_config is not None else AppConfig.current()
tool_configs = [tool for tool in config.tools if groups is None or tool.group in groups]
# Do not expose host bash by default when LocalSandboxProvider is active.
@ -125,7 +129,7 @@ def get_available_tools(
try:
from deerflow.tools.builtins.invoke_acp_agent_tool import build_invoke_acp_agent_tool
acp_agents = AppConfig.current().acp_agents
acp_agents = config.acp_agents
if acp_agents:
acp_tools.append(build_invoke_acp_agent_tool(acp_agents))
logger.info(f"Including invoke_acp_agent tool ({len(acp_agents)} agent(s): {list(acp_agents.keys())})")