feat: add LangSmith tracing integration (#878)
* feat: add LangSmith tracing integration Add optional LangSmith tracing support that can be enabled via environment variables (LANGSMITH_TRACING, LANGSMITH_API_KEY, LANGSMITH_PROJECT, LANGSMITH_ENDPOINT). When enabled, a LangChainTracer callback is attached to chat models and run metadata is injected for trace tagging. Co-Authored-By: Claude <noreply@anthropic.com> * Update backend/src/config/tracing_config.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update backend/src/agents/lead_agent/agent.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update backend/src/agents/lead_agent/agent.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update backend/src/models/factory.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Add threading lock to ensure thread-safe access to tracing configuration --------- Co-authored-by: Claude <noreply@anthropic.com> Co-authored-by: Willem Jiang <willem.jiang@gmail.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
parent
75226b2fe6
commit
85af540076
|
|
@ -245,6 +245,17 @@ def make_lead_agent(config: RunnableConfig):
|
||||||
subagent_enabled = config.get("configurable", {}).get("subagent_enabled", False)
|
subagent_enabled = config.get("configurable", {}).get("subagent_enabled", False)
|
||||||
max_concurrent_subagents = config.get("configurable", {}).get("max_concurrent_subagents", 3)
|
max_concurrent_subagents = config.get("configurable", {}).get("max_concurrent_subagents", 3)
|
||||||
print(f"thinking_enabled: {thinking_enabled}, model_name: {model_name}, is_plan_mode: {is_plan_mode}, subagent_enabled: {subagent_enabled}, max_concurrent_subagents: {max_concurrent_subagents}")
|
print(f"thinking_enabled: {thinking_enabled}, model_name: {model_name}, is_plan_mode: {is_plan_mode}, subagent_enabled: {subagent_enabled}, max_concurrent_subagents: {max_concurrent_subagents}")
|
||||||
|
|
||||||
|
# Inject run metadata for LangSmith trace tagging
|
||||||
|
if "metadata" not in config:
|
||||||
|
config["metadata"] = {}
|
||||||
|
config["metadata"].update({
|
||||||
|
"model_name": model_name or "default",
|
||||||
|
"thinking_enabled": thinking_enabled,
|
||||||
|
"is_plan_mode": is_plan_mode,
|
||||||
|
"subagent_enabled": subagent_enabled,
|
||||||
|
})
|
||||||
|
|
||||||
return create_agent(
|
return create_agent(
|
||||||
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled),
|
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled),
|
||||||
tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled),
|
tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled),
|
||||||
|
|
|
||||||
|
|
@ -2,6 +2,7 @@ from .app_config import get_app_config
|
||||||
from .extensions_config import ExtensionsConfig, get_extensions_config
|
from .extensions_config import ExtensionsConfig, get_extensions_config
|
||||||
from .memory_config import MemoryConfig, get_memory_config
|
from .memory_config import MemoryConfig, get_memory_config
|
||||||
from .skills_config import SkillsConfig
|
from .skills_config import SkillsConfig
|
||||||
|
from .tracing_config import get_tracing_config, is_tracing_enabled
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"get_app_config",
|
"get_app_config",
|
||||||
|
|
@ -10,4 +11,6 @@ __all__ = [
|
||||||
"get_extensions_config",
|
"get_extensions_config",
|
||||||
"MemoryConfig",
|
"MemoryConfig",
|
||||||
"get_memory_config",
|
"get_memory_config",
|
||||||
|
"get_tracing_config",
|
||||||
|
"is_tracing_enabled",
|
||||||
]
|
]
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,51 @@
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
import threading
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
_config_lock = threading.Lock()
|
||||||
|
|
||||||
|
class TracingConfig(BaseModel):
    """Configuration for LangSmith tracing.

    Populated from environment variables by ``get_tracing_config``; all
    fields are declared required (``Field(...)``) because that factory
    always supplies every value explicitly.
    """

    # Whether tracing was requested (LANGSMITH_TRACING == "true").
    enabled: bool = Field(...)
    # API key for LangSmith; None when LANGSMITH_API_KEY is unset.
    api_key: str | None = Field(...)
    # LangSmith project name traces are grouped under.
    project: str = Field(...)
    # LangSmith API endpoint URL.
    endpoint: str = Field(...)

    @property
    def is_configured(self) -> bool:
        """Check if tracing is fully configured (enabled and has API key)."""
        return self.enabled and bool(self.api_key)
|
||||||
|
|
||||||
|
|
||||||
|
_tracing_config: TracingConfig | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_tracing_config() -> TracingConfig:
    """Read the LangSmith tracing configuration from environment variables.

    The configuration is constructed lazily on first use and cached for the
    lifetime of the process; later calls return the cached instance without
    re-reading the environment.

    Returns:
        TracingConfig with current settings.
    """
    global _tracing_config

    # Fast path: already built; no lock needed for a read in CPython.
    if _tracing_config is None:
        with _config_lock:
            # Re-check under the lock so only one thread constructs the config.
            if _tracing_config is None:
                _tracing_config = TracingConfig(
                    enabled=os.environ.get("LANGSMITH_TRACING", "").lower() == "true",
                    api_key=os.environ.get("LANGSMITH_API_KEY"),
                    project=os.environ.get("LANGSMITH_PROJECT", "deer-flow"),
                    endpoint=os.environ.get("LANGSMITH_ENDPOINT", "https://api.smith.langchain.com"),
                )
    return _tracing_config
|
||||||
|
|
||||||
|
def is_tracing_enabled() -> bool:
    """Check if LangSmith tracing is enabled and configured.

    Returns:
        True if tracing is enabled and has an API key.
    """
    config = get_tracing_config()
    return config.is_configured
|
||||||
|
|
||||||
|
|
@ -1,8 +1,10 @@
|
||||||
|
import logging
|
||||||
from langchain.chat_models import BaseChatModel
|
from langchain.chat_models import BaseChatModel
|
||||||
|
|
||||||
from src.config import get_app_config
|
from src.config import get_app_config, get_tracing_config, is_tracing_enabled
|
||||||
from src.reflection import resolve_class
|
from src.reflection import resolve_class
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
def create_chat_model(name: str | None = None, thinking_enabled: bool = False, **kwargs) -> BaseChatModel:
|
def create_chat_model(name: str | None = None, thinking_enabled: bool = False, **kwargs) -> BaseChatModel:
|
||||||
"""Create a chat model instance from the config.
|
"""Create a chat model instance from the config.
|
||||||
|
|
@ -37,4 +39,20 @@ def create_chat_model(name: str | None = None, thinking_enabled: bool = False, *
|
||||||
raise ValueError(f"Model {name} does not support thinking. Set `supports_thinking` to true in the `config.yaml` to enable thinking.") from None
|
raise ValueError(f"Model {name} does not support thinking. Set `supports_thinking` to true in the `config.yaml` to enable thinking.") from None
|
||||||
model_settings_from_config.update(model_config.when_thinking_enabled)
|
model_settings_from_config.update(model_config.when_thinking_enabled)
|
||||||
model_instance = model_class(**kwargs, **model_settings_from_config)
|
model_instance = model_class(**kwargs, **model_settings_from_config)
|
||||||
|
|
||||||
|
if is_tracing_enabled():
|
||||||
|
try:
|
||||||
|
from langchain_core.tracers.langchain import LangChainTracer
|
||||||
|
|
||||||
|
tracing_config = get_tracing_config()
|
||||||
|
tracer = LangChainTracer(
|
||||||
|
project_name=tracing_config.project,
|
||||||
|
)
|
||||||
|
existing_callbacks = model_instance.callbacks or []
|
||||||
|
model_instance.callbacks = [*existing_callbacks, tracer]
|
||||||
|
logger.debug(
|
||||||
|
f"LangSmith tracing attached to model '{name}' (project='{tracing_config.project}')"
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to attach LangSmith tracing to model '{name}': {e}")
|
||||||
return model_instance
|
return model_instance
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue