# deerflow2/backend/packages/harness/deerflow/runtime/serialization.py
# (file-listing metadata: 126 lines, 4.1 KiB, Python — kept as a comment so the module parses)

"""Canonical serialization for LangChain / LangGraph objects.
Provides a single source of truth for converting LangChain message
objects, Pydantic models, and LangGraph state dicts into plain
JSON-serialisable Python structures.
Consumers: ``deerflow.runtime.runs.worker`` (SSE publishing) and
``app.gateway.routers.threads`` (REST responses).
"""
from __future__ import annotations
from typing import Any
# Keys probed (in priority order) inside additional_kwargs / response_metadata
# when hunting for a message's creation timestamp.
_TIMESTAMP_KEYS: tuple[str, ...] = ("deerflow_created_at", "created_at", "timestamp", "sent_at")
# LangChain message "type" discriminators that qualify a dict as a message
# eligible for created_at normalization.
_MESSAGE_TYPES: set[str] = {"human", "ai", "tool", "system", "function", "chat"}
def _read_message_timestamp(message: dict[str, Any]) -> str | None:
    """Best-effort lookup of a serialized message's creation timestamp.

    Checks the top-level ``created_at`` first, then every key in
    ``_TIMESTAMP_KEYS`` inside ``additional_kwargs`` and, after that,
    ``response_metadata``. Returns the first non-empty string found,
    or ``None`` when no usable timestamp exists.
    """
    top_level = message.get("created_at")
    if isinstance(top_level, str) and top_level:
        return top_level
    # Same probe applied to both metadata containers, in priority order.
    for container_name in ("additional_kwargs", "response_metadata"):
        container = message.get(container_name)
        if not isinstance(container, dict):
            continue
        for key in _TIMESTAMP_KEYS:
            candidate = container.get(key)
            if isinstance(candidate, str) and candidate:
                return candidate
    return None
def _attach_created_at(message: Any) -> Any:
    """Copy a discovered timestamp onto ``message["created_at"]`` in place.

    Non-dict payloads, and dicts whose ``type`` is not one of the known
    message types, are returned untouched. Returns the (possibly mutated)
    input so the function composes with map-style callers.
    """
    looks_like_message = (
        isinstance(message, dict) and message.get("type") in _MESSAGE_TYPES
    )
    if looks_like_message:
        ts = _read_message_timestamp(message)
        if ts:
            message["created_at"] = ts
    return message
def _normalize_message_timestamps(payload: Any) -> Any:
    """Apply ``_attach_created_at`` to one message or a list of messages."""
    if not isinstance(payload, list):
        return _attach_created_at(payload)
    return [_attach_created_at(entry) for entry in payload]
def serialize_lc_object(obj: Any) -> Any:
    """Recursively serialize a LangChain object to a JSON-serialisable structure.

    Handles, in order: ``None``; JSON primitives; dicts (values recursed);
    lists/tuples (converted to lists, items recursed); Pydantic v2 models
    (``model_dump``); Pydantic v1 / legacy objects (``dict``); and finally
    a string fallback for anything else.

    Fix over the previous version: the Pydantic dumps are themselves
    re-serialized. ``model_dump()`` can leave nested non-primitive values
    in place (e.g. objects stashed in ``additional_kwargs``), so returning
    the dump directly could leak non-JSON-serializable values to callers.
    """
    if obj is None:
        return None
    if isinstance(obj, (str, int, float, bool)):
        return obj
    if isinstance(obj, dict):
        return {k: serialize_lc_object(v) for k, v in obj.items()}
    # Tuples become lists to match JSON array semantics.
    if isinstance(obj, (list, tuple)):
        return [serialize_lc_object(item) for item in obj]
    # Pydantic v2
    if hasattr(obj, "model_dump"):
        try:
            dumped = obj.model_dump()
        except Exception:
            pass  # fall through to the v1 / string fallbacks
        else:
            return serialize_lc_object(dumped)
    # Pydantic v1 / older objects
    if hasattr(obj, "dict"):
        try:
            dumped = obj.dict()
        except Exception:
            pass
        else:
            return serialize_lc_object(dumped)
    # Last resort: stringify unknown objects rather than raising.
    try:
        return str(obj)
    except Exception:
        return repr(obj)
def serialize_channel_values(channel_values: dict[str, Any]) -> dict[str, Any]:
    """Serialize channel values, stripping internal LangGraph keys.

    Keys beginning with ``__pregel_`` and the ``__interrupt__`` channel are
    dropped to mirror what the LangGraph Platform API returns; the
    ``messages`` channel additionally gets timestamp normalization.
    """
    serialized_state: dict[str, Any] = {}
    for channel, raw_value in channel_values.items():
        internal = channel.startswith("__pregel_") or channel == "__interrupt__"
        if internal:
            continue
        value = serialize_lc_object(raw_value)
        serialized_state[channel] = (
            _normalize_message_timestamps(value) if channel == "messages" else value
        )
    return serialized_state
def serialize_messages_tuple(obj: Any) -> Any:
    """Serialize a messages-mode tuple ``(chunk, metadata)``.

    Returns ``[serialized_chunk, serialized_metadata]``; non-dict metadata
    is replaced by ``{}``. Anything that is not a 2-tuple falls back to
    generic serialization.

    Fix over the previous version: the metadata dict is now run through
    ``serialize_lc_object`` instead of being passed through verbatim —
    it may carry non-primitive values (TODO confirm against the stream
    producer) that would otherwise break downstream JSON encoding.
    """
    if isinstance(obj, tuple) and len(obj) == 2:
        chunk, metadata = obj
        serialized_chunk = _normalize_message_timestamps(serialize_lc_object(chunk))
        serialized_metadata = serialize_lc_object(metadata) if isinstance(metadata, dict) else {}
        return [serialized_chunk, serialized_metadata]
    return serialize_lc_object(obj)
def serialize(obj: Any, *, mode: str = "") -> Any:
    """Serialize LangChain objects with mode-specific handling.

    * ``messages`` — obj is ``(message_chunk, metadata_dict)``
    * ``values`` — obj is the full state dict; ``__pregel_*`` keys stripped
    * everything else — recursive ``model_dump()`` / ``dict()`` fallback
    """
    if mode == "messages":
        return serialize_messages_tuple(obj)
    if mode == "values" and isinstance(obj, dict):
        return serialize_channel_values(obj)
    return serialize_lc_object(obj)