mirror of
https://github.com/bytedance/deer-flow.git
synced 2026-04-25 11:18:22 +00:00
* feat(gateway): implement LangGraph Platform API in Gateway, replace langgraph-cli
Implement all core LangGraph Platform API endpoints in the Gateway,
allowing it to fully replace the langgraph-cli dev server for local
development. This eliminates a heavyweight dependency and simplifies
the development stack.
Changes:
- Add runs lifecycle endpoints (create, stream, wait, cancel, join)
- Add threads CRUD and search endpoints
- Add assistants compatibility endpoints (search, get, graph, schemas)
- Add StreamBridge (in-memory pub/sub for SSE) and async provider
- Add RunManager with atomic create_or_reject (eliminates TOCTOU race)
- Add worker with interrupt/rollback cancel actions and runtime context injection
- Route /api/langgraph/* to Gateway in nginx config
- Skip langgraph-cli startup by default (SKIP_LANGGRAPH_SERVER=0 to restore)
- Add unit tests for RunManager, SSE format, and StreamBridge
* fix: drain bridge queue on client disconnect to prevent backpressure
When on_disconnect=continue, keep consuming events from the bridge
without yielding, so the worker is not blocked by a full queue.
Only on_disconnect=cancel breaks out immediately.
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* fix: remove pytest import
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* fix: Fix default stream_mode to ["values", "messages-tuple"]
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* fix: Remove unused if_exists field from ThreadCreateRequest
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* fix: address review comments on gateway LangGraph API
- Mount runs.py router in app.py (missing include_router)
- Normalize interrupt_before/after "*" to node list before run_agent()
- Use entry.id for SSE event ID instead of counter
- Drain bridge queue on disconnect when on_disconnect=continue
- Reuse serialization helper in wait_run() for consistent wire format
- Reject unsupported multitask_strategy with 400
- Remove SKIP_LANGGRAPH_SERVER fallback, always use Gateway
* feat: extract app.state access into deps.py
Encapsulate read/write operations for singleton objects (RunManager,
StreamBridge, checkpointer) held in app.state into a shared utility,
reducing repeated access patterns across router modules.
* feat: extract deerflow.runtime.serialization module with tests
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
* refactor: replace duplicated serialization with deerflow.runtime.serialization
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
* feat: extract app/gateway/services.py with run lifecycle logic
Create a service layer that centralizes SSE formatting, input/config
normalization, and run lifecycle management. Router modules will delegate
to these functions instead of using private cross-imported helpers.
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
* refactor: wire routers to use services layer, remove cross-module private imports
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
* style: apply ruff formatting to refactored files
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
* feat(runtime): support LangGraph dev server and add compat route
- Enable official LangGraph dev server for local development workflow
- Decouple runtime components from agents package for better separation
- Provide gateway-backed fallback route when dev server is skipped
- Simplify lifecycle management using context manager in gateway
* feat(runtime): add Store providers with auto-backend selection
- Add async_provider.py and provider.py under deerflow/runtime/store/
- Support memory, sqlite, postgres backends matching checkpointer config
- Integrate into FastAPI lifespan via AsyncExitStack in deps.py
- Replace hardcoded InMemoryStore with config-driven factory
* refactor(gateway): migrate thread management from checkpointer to Store and resolve multiple endpoint failures
- Add Store-backed CRUD helpers (_store_get, _store_put, _store_upsert)
- Replace checkpoint-scanning search with two-phase strategy:
phase 1 reads Store (O(threads)), phase 2 backfills from checkpointer
for legacy/LangGraph Server threads with lazy migration
- Extend Store record schema with values field for title persistence
- Sync thread title from checkpoint to Store after run completion
- Fix /threads/{id}/runs/{run_id}/stream 405 by accepting both
GET and POST methods; POST handles interrupt/rollback actions
- Fix /threads/{id}/state 500 by separating read_config and
write_config, adding checkpoint_ns to configurable, and
shallow-copying checkpoint/metadata before mutation
- Sync title to Store on state update for immediate search reflection
- Move _upsert_thread_in_store into services.py, remove duplicate logic
- Add _sync_thread_title_after_run: await run task, read final
checkpoint title, write back to Store record
- Spawn title sync as background task from start_run when Store exists
* refactor(runtime): deduplicate store and checkpointer provider logic
Extract _ensure_sqlite_parent_dir() helper into checkpointer/provider.py
and use it in all three places that previously inlined the same mkdir logic.
Consolidate duplicate error constants in store/async_provider.py by importing
from store/provider.py instead of redefining them.
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
* refactor(runtime): move SQLite helpers to runtime/store, checkpointer imports from store
_resolve_sqlite_conn_str and _ensure_sqlite_parent_dir now live in
runtime/store/provider.py. agents/checkpointer/provider and
agents/checkpointer/async_provider import from there, reversing the
previous dependency direction (store → checkpointer becomes
checkpointer → store).
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
* refactor(runtime): extract SQLite helpers into runtime/store/_sqlite_utils.py
Move resolve_sqlite_conn_str and ensure_sqlite_parent_dir out of
checkpointer/provider.py into a dedicated _sqlite_utils module.
Functions are now public (no underscore prefix), making cross-module
imports semantically correct. All four provider files import from
the single shared location.
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
* fix(gateway): use adelete_thread to fully remove thread checkpoints on delete
AsyncSqliteSaver has no adelete method — the previous hasattr check
always evaluated to False, silently leaving all checkpoint rows in the
database. Switch to adelete_thread(thread_id) which deletes every
checkpoint and pending-write row for the thread across all namespaces
(including sub-graph checkpoints).
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
* fix(gateway): remove dead bridge_cm/ckpt_cm code and fix StrEnum lint
app.py had unreachable code after the async-with lifespan refactor:
bridge_cm and ckpt_cm were referenced but never defined (F821), and
the channel service startup/shutdown was outside the langgraph_runtime
block so it never ran. Move channel service lifecycle inside the
async-with block where it belongs.
Replace str+Enum inheritance in RunStatus and DisconnectMode with
StrEnum as suggested by UP042.
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
* style: format with ruff
---------
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: JeffJiang <for-eleven@hotmail.com>
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Co-authored-by: Willem Jiang <willem.jiang@gmail.com>
153 lines
4.8 KiB
Python
"""Tests for the in-memory StreamBridge implementation."""
|
|
|
|
import asyncio
|
|
import re
|
|
|
|
import pytest
|
|
|
|
from deerflow.runtime import END_SENTINEL, HEARTBEAT_SENTINEL, MemoryStreamBridge, make_stream_bridge
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Unit tests for MemoryStreamBridge
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@pytest.fixture
def bridge() -> MemoryStreamBridge:
    """Provide a fresh per-test bridge with queue_maxsize=256."""
    instance = MemoryStreamBridge(queue_maxsize=256)
    return instance
|
|
|
|
|
|
@pytest.mark.anyio
async def test_publish_subscribe(bridge: MemoryStreamBridge):
    """Three events followed by end should be received in order."""
    run_id = "run-1"

    # Publish three ordered events, then signal end-of-stream.
    for event_name, payload in (
        ("metadata", {"run_id": run_id}),
        ("values", {"messages": []}),
        ("updates", {"step": 1}),
    ):
        await bridge.publish(run_id, event_name, payload)
    await bridge.publish_end(run_id)

    received = []
    async for entry in bridge.subscribe(run_id, heartbeat_interval=1.0):
        received.append(entry)
        if entry is END_SENTINEL:
            break

    # All three events arrive in publish order, terminated by the sentinel.
    assert len(received) == 4
    assert [e.event for e in received[:3]] == ["metadata", "values", "updates"]
    assert received[-1] is END_SENTINEL
|
|
|
|
|
|
@pytest.mark.anyio
async def test_heartbeat(bridge: MemoryStreamBridge):
    """When no events arrive within the heartbeat interval, yield a heartbeat."""
    run_id = "run-heartbeat"
    # Create the queue up front so subscribe() has something to wait on.
    bridge._get_or_create_queue(run_id)

    seen = []

    async def consume_until_heartbeat():
        async for item in bridge.subscribe(run_id, heartbeat_interval=0.1):
            seen.append(item)
            if item is HEARTBEAT_SENTINEL:
                return

    # With no events published, the first thing yielded must be a heartbeat.
    await asyncio.wait_for(consume_until_heartbeat(), timeout=2.0)
    assert seen == [HEARTBEAT_SENTINEL]
|
|
|
|
|
|
@pytest.mark.anyio
async def test_cleanup(bridge: MemoryStreamBridge):
    """After cleanup, the run's queue is removed."""
    run_id = "run-cleanup"
    await bridge.publish(run_id, "test", {})
    assert run_id in bridge._queues

    await bridge.cleanup(run_id)
    # Both per-run structures must be dropped together.
    for registry in (bridge._queues, bridge._counters):
        assert run_id not in registry
|
|
|
|
|
|
@pytest.mark.anyio
async def test_backpressure():
    """With maxsize=1, publish should not block forever."""
    bridge = MemoryStreamBridge(queue_maxsize=1)
    run_id = "run-bp"

    await bridge.publish(run_id, "first", {})

    # The queue is now full. A second publish must not hang: it should
    # complete once the queue drains (or warn+drop, per the bridge's policy).
    async def publish_second():
        await bridge.publish(run_id, "second", {})

    # Drain concurrently so the pending publish can proceed.
    async def drain():
        await asyncio.sleep(0.05)
        bridge._queues[run_id].get_nowait()

    # Bound the whole interaction: the bridge's internal publish timeout is
    # long (~30s), so without an outer timeout a stuck publish would stall
    # the entire test suite instead of failing fast.
    await asyncio.wait_for(
        asyncio.gather(publish_second(), drain()),
        timeout=5.0,
    )
    assert bridge._queues[run_id].qsize() == 1
|
|
|
|
|
|
@pytest.mark.anyio
async def test_multiple_runs(bridge: MemoryStreamBridge):
    """Two different run_ids should not interfere with each other."""
    await bridge.publish("run-a", "event-a", {"a": 1})
    await bridge.publish("run-b", "event-b", {"b": 2})
    await bridge.publish_end("run-a")
    await bridge.publish_end("run-b")

    # Drain one run's stream up to (and including) its end sentinel.
    async def collect(run_id: str) -> list:
        collected = []
        async for item in bridge.subscribe(run_id, heartbeat_interval=1.0):
            collected.append(item)
            if item is END_SENTINEL:
                break
        return collected

    events_a = await collect("run-a")
    events_b = await collect("run-b")

    # Each run sees only its own event plus its own end sentinel.
    assert len(events_a) == 2
    assert (events_a[0].event, events_a[0].data) == ("event-a", {"a": 1})

    assert len(events_b) == 2
    assert (events_b[0].event, events_b[0].data) == ("event-b", {"b": 2})
|
|
|
|
|
|
@pytest.mark.anyio
async def test_event_id_format(bridge: MemoryStreamBridge):
    """Event IDs should use timestamp-sequence format."""
    run_id = "run-id-format"
    await bridge.publish(run_id, "test", {"key": "value"})
    await bridge.publish_end(run_id)

    # Capture just the first entry; keep consuming until the stream ends.
    first_entry = None
    async for item in bridge.subscribe(run_id, heartbeat_interval=1.0):
        if first_entry is None:
            first_entry = item
        if item is END_SENTINEL:
            break

    assert first_entry is not None
    assert re.fullmatch(r"\d+-\d+", first_entry.id), (
        f"Expected timestamp-seq format, got {first_entry.id}"
    )
|
|
|
|
|
|
# ---------------------------------------------------------------------------
|
|
# Factory tests
|
|
# ---------------------------------------------------------------------------
|
|
|
|
|
|
@pytest.mark.anyio
async def test_make_stream_bridge_defaults():
    """make_stream_bridge() with no config yields a MemoryStreamBridge."""
    async with make_stream_bridge() as created:
        # The in-memory backend is the default when nothing is configured.
        assert isinstance(created, MemoryStreamBridge)
|