Merge branch 'main' into rayhpeng/persistence-scaffold

This commit is contained in:
rayhpeng 2026-04-06 10:22:53 +08:00 committed by GitHub
commit e4e4320af5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 75 additions and 4 deletions

View File

@ -345,6 +345,7 @@ class DeerFlowClient:
Yields:
StreamEvent with one of:
- type="values" data={"title": str|None, "messages": [...], "artifacts": [...]}
- type="custom" data={...}
- type="messages-tuple" data={"type": "ai", "content": str, "id": str}
- type="messages-tuple" data={"type": "ai", "content": str, "id": str, "usage_metadata": {...}}
- type="messages-tuple" data={"type": "ai", "content": "", "id": str, "tool_calls": [...]}
@ -365,7 +366,22 @@ class DeerFlowClient:
seen_ids: set[str] = set()
cumulative_usage: dict[str, int] = {"input_tokens": 0, "output_tokens": 0, "total_tokens": 0}
for chunk in self._agent.stream(state, config=config, context=context, stream_mode="values"):
for item in self._agent.stream(
state,
config=config,
context=context,
stream_mode=["values", "custom"],
):
if isinstance(item, tuple) and len(item) == 2:
mode, chunk = item
mode = str(mode)
else:
mode, chunk = "values", item
if mode == "custom":
yield StreamEvent(type="custom", data=chunk)
continue
messages = chunk.get("messages", [])
for msg in messages:

View File

@ -5,6 +5,7 @@ import concurrent.futures
import json
import tempfile
import zipfile
from enum import Enum
from pathlib import Path
from unittest.mock import MagicMock, patch
@ -205,6 +206,33 @@ class TestStream:
msg_events = _ai_events(events)
assert msg_events[0].data["content"] == "Hello!"
def test_custom_events_are_forwarded(self, client):
    """stream() forwards custom stream events alongside normal values output."""
    reply = AIMessage(content="Hello!", id="ai-1")
    fake_agent = MagicMock()
    # The agent yields one (mode, payload) custom tuple, then a normal values chunk.
    stream_items = [
        ("custom", {"type": "task_started", "task_id": "task-1"}),
        ("values", {"messages": [HumanMessage(content="hi", id="h-1"), reply]}),
    ]
    fake_agent.stream.return_value = iter(stream_items)
    with (
        patch.object(client, "_ensure_agent"),
        patch.object(client, "_agent", fake_agent),
    ):
        collected = list(client.stream("hi", thread_id="t-custom"))
    fake_agent.stream.assert_called_once()
    passed_kwargs = fake_agent.stream.call_args.kwargs
    # Both stream modes must be requested so custom events can arrive at all.
    assert passed_kwargs["stream_mode"] == ["values", "custom"]
    first = collected[0]
    assert first.type == "custom"
    assert first.data == {"type": "task_started", "task_id": "task-1"}
    ai_contents = [
        event.data["content"] for event in collected if event.type == "messages-tuple"
    ]
    assert "Hello!" in ai_contents
    assert any(event.type == "values" for event in collected)
    assert collected[-1].type == "end"
def test_context_propagation(self, client):
"""stream() passes agent_name to the context."""
agent = _make_agent_mock([{"messages": [AIMessage(content="ok", id="ai-1")]}])
@ -222,6 +250,33 @@ class TestStream:
assert call_kwargs["context"]["thread_id"] == "t1"
assert call_kwargs["context"]["agent_name"] == "test-agent-1"
def test_custom_mode_is_normalized_to_string(self, client):
    """stream() forwards custom events even when the mode is not a plain string."""

    # An enum whose str() yields "custom" stands in for non-string stream modes.
    class StreamMode(Enum):
        CUSTOM = "custom"

        def __str__(self):
            return self.value

    fake_agent = _make_agent_mock(
        [
            (StreamMode.CUSTOM, {"type": "task_started", "task_id": "task-1"}),
            {"messages": [AIMessage(content="Hello!", id="ai-1")]},
        ]
    )
    with (
        patch.object(client, "_ensure_agent"),
        patch.object(client, "_agent", fake_agent),
    ):
        collected = list(client.stream("hi", thread_id="t-custom-enum"))
    head = collected[0]
    assert head.type == "custom"
    assert head.data == {"type": "task_started", "task_id": "task-1"}
    hello_seen = any(
        event.type == "messages-tuple" and event.data["content"] == "Hello!"
        for event in collected
    )
    assert hello_seen
    assert collected[-1].type == "end"
def test_tool_call_and_result(self, client):
"""stream() emits messages-tuple events for tool calls and results."""
ai = AIMessage(content="", id="ai-1", tool_calls=[{"name": "bash", "args": {"cmd": "ls"}, "id": "tc-1"}])

View File

@ -123,7 +123,7 @@ services:
UV_IMAGE: ${UV_IMAGE:-ghcr.io/astral-sh/uv:0.7.20}
UV_INDEX_URL: ${UV_INDEX_URL:-https://pypi.org/simple}
container_name: deer-flow-gateway
command: sh -c "cd backend && uv sync && PYTHONPATH=. uv run uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 --reload --reload-include='*.yaml .env' > /app/logs/gateway.log 2>&1"
command: sh -c "{ cd backend && (uv sync || (echo '[startup] uv sync failed; recreating .venv and retrying once' && uv venv --allow-existing .venv && uv sync)) && PYTHONPATH=. uv run uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 --reload --reload-include='*.yaml .env'; } > /app/logs/gateway.log 2>&1"
volumes:
- ../backend/:/app/backend/
# Preserve the .venv built during Docker image build — mounting the full backend/
@ -180,7 +180,7 @@ services:
UV_IMAGE: ${UV_IMAGE:-ghcr.io/astral-sh/uv:0.7.20}
UV_INDEX_URL: ${UV_INDEX_URL:-https://pypi.org/simple}
container_name: deer-flow-langgraph
command: sh -c "cd backend && uv sync && allow_blocking='' && if [ \"\${LANGGRAPH_ALLOW_BLOCKING:-0}\" = '1' ]; then allow_blocking='--allow-blocking'; fi && uv run langgraph dev --no-browser \${allow_blocking} --host 0.0.0.0 --port 2024 --n-jobs-per-worker \${LANGGRAPH_JOBS_PER_WORKER:-10} > /app/logs/langgraph.log 2>&1"
command: sh -c "cd backend && { (uv sync || (echo '[startup] uv sync failed; recreating .venv and retrying once' && uv venv --allow-existing .venv && uv sync)) && allow_blocking='' && if [ \"\${LANGGRAPH_ALLOW_BLOCKING:-0}\" = '1' ]; then allow_blocking='--allow-blocking'; fi && uv run langgraph dev --no-browser \${allow_blocking} --host 0.0.0.0 --port 2024 --n-jobs-per-worker \${LANGGRAPH_JOBS_PER_WORKER:-10}; } > /app/logs/langgraph.log 2>&1"
volumes:
- ../backend/:/app/backend/
# Preserve the .venv built during Docker image build — mounting the full backend/

View File

@ -127,7 +127,7 @@ services:
UV_INDEX_URL: ${UV_INDEX_URL:-https://pypi.org/simple}
UV_EXTRAS: ${UV_EXTRAS:-}
container_name: deer-flow-langgraph
command: sh -c 'cd /app/backend && allow_blocking_flag="" && if [ "${LANGGRAPH_ALLOW_BLOCKING:-0}" = "1" ]; then allow_blocking_flag="--allow-blocking"; fi && uv run langgraph dev --no-browser ${allow_blocking_flag} --no-reload --host 0.0.0.0 --port 2024 --n-jobs-per-worker ${LANGGRAPH_JOBS_PER_WORKER:-10}'
command: sh -c 'cd /app/backend && allow_blocking="" && if [ "\${LANGGRAPH_ALLOW_BLOCKING:-0}" = "1" ]; then allow_blocking="--allow-blocking"; fi && uv run langgraph dev --no-browser \${allow_blocking} --no-reload --host 0.0.0.0 --port 2024 --n-jobs-per-worker \${LANGGRAPH_JOBS_PER_WORKER:-10}'
volumes:
- ${DEER_FLOW_CONFIG_PATH}:/app/backend/config.yaml:ro
- ${DEER_FLOW_EXTENSIONS_CONFIG_PATH}:/app/backend/extensions_config.json:ro