mirror of
https://github.com/bytedance/deer-flow.git
synced 2026-04-25 11:18:22 +00:00
Major refactoring of deerflow/runtime/: - runs/callbacks/ - new callback system (builder, events, title, tokens) - runs/internal/ - execution internals (executor, supervisor, stream_logic, registry) - runs/internal/execution/ - execution artifacts and events handling - runs/facade.py - high-level run facade - runs/observer.py - run observation protocol - runs/types.py - type definitions - runs/store/ - simplified store interfaces (create, delete, query, event) Refactor stream_bridge/: - Replace old providers with contract.py and exceptions.py - Remove async_provider.py, base.py, memory.py Add documentation: - README.md and README_zh.md for runtime module Remove deprecated: - manager.py moved to internal/ - worker.py, schemas.py - user_context.py Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
79 lines
2.6 KiB
Python
79 lines
2.6 KiB
Python
"""Canonical serialization for LangChain / LangGraph objects.
|
|
|
|
Provides a single source of truth for converting LangChain message
|
|
objects, Pydantic models, and LangGraph state dicts into plain
|
|
JSON-serialisable Python structures.
|
|
|
|
Consumers: runs execution internals (SSE publishing) and
|
|
gateway thread state/history responses.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
from typing import Any
|
|
|
|
|
|
def serialize_lc_object(obj: Any) -> Any:
    """Recursively serialize a LangChain object to a JSON-serialisable structure.

    Handles, in order: ``None``, JSON primitives, dicts (values recursed,
    keys kept as-is), lists/tuples (both become lists), Pydantic v2 models
    (``model_dump``), Pydantic v1 / legacy objects (``dict``), and finally a
    ``str()`` / ``repr()`` fallback for anything else.

    Dump failures are deliberately swallowed (best-effort serialization for
    SSE publishing); the function never raises.
    """
    if obj is None:
        return None
    # bool is a subclass of int, so it is covered here as well.
    if isinstance(obj, (str, int, float, bool)):
        return obj
    if isinstance(obj, dict):
        return {k: serialize_lc_object(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple)):
        return [serialize_lc_object(item) for item in obj]
    # Pydantic v2. Recurse into the dump: model_dump(mode="python") may still
    # contain non-JSON values (datetimes, enums, nested message objects).
    if hasattr(obj, "model_dump"):
        try:
            return serialize_lc_object(obj.model_dump())
        except Exception:
            pass
    # Pydantic v1 / older objects — same recursion rationale as above.
    if hasattr(obj, "dict"):
        try:
            return serialize_lc_object(obj.dict())
        except Exception:
            pass
    # Last resort: a readable string. str() can raise for exotic __str__
    # implementations, so fall back to repr().
    try:
        return str(obj)
    except Exception:
        return repr(obj)
|
|
|
|
|
|
def serialize_channel_values(channel_values: dict[str, Any]) -> dict[str, Any]:
    """Serialize channel values, stripping internal LangGraph keys.

    Internal keys like ``__pregel_*`` and ``__interrupt__`` are removed
    to match what the LangGraph Platform API returns.
    """
    return {
        key: serialize_lc_object(value)
        for key, value in channel_values.items()
        if not key.startswith("__pregel_") and key != "__interrupt__"
    }
|
|
|
|
|
|
def serialize_messages_tuple(obj: Any) -> Any:
    """Serialize a messages-mode tuple ``(chunk, metadata)``."""
    # Anything that is not a 2-tuple is serialized as a plain object.
    if not (isinstance(obj, tuple) and len(obj) == 2):
        return serialize_lc_object(obj)
    chunk, metadata = obj
    # Non-dict metadata is normalised to an empty dict.
    meta = metadata if isinstance(metadata, dict) else {}
    return [serialize_lc_object(chunk), meta]
|
|
|
|
|
|
def serialize(obj: Any, *, mode: str = "") -> Any:
    """Serialize LangChain objects with mode-specific handling.

    * ``messages`` — obj is ``(message_chunk, metadata_dict)``
    * ``values`` — obj is the full state dict; ``__pregel_*`` keys stripped
    * everything else — recursive ``model_dump()`` / ``dict()`` fallback
    """
    if mode == "messages":
        return serialize_messages_tuple(obj)
    # values-mode stripping only applies to actual dicts; anything else
    # falls through to the generic path below.
    if mode == "values" and isinstance(obj, dict):
        return serialize_channel_values(obj)
    return serialize_lc_object(obj)
|