feat: initial monorepo scaffold - Next.js 15 + Prisma + Python SDK stubs
- Turborepo monorepo with apps/web and packages/database, sdk-python
- Next.js 15 app with professional landing page (dark theme, emerald accent)
- Prisma schema: Trace, DecisionPoint, Span, Event models with full indexing
- Docker Compose: web (port 4200), postgres:16, redis:7, migrate service
- Python SDK package stubs: init, trace decorator, log_decision, integrations
- Multi-stage Dockerfile for standalone Next.js production build
This commit is contained in:
1
packages/sdk-python/agentlens/integrations/__init__.py
Normal file
1
packages/sdk-python/agentlens/integrations/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Integration packages for AgentLens."""
|
||||
55
packages/sdk-python/agentlens/integrations/langchain.py
Normal file
55
packages/sdk-python/agentlens/integrations/langchain.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""LangChain integration for AgentLens."""
|
||||
|
||||
from typing import Any, Dict, Optional, Sequence
|
||||
from langchain_core.callbacks import BaseCallbackHandler
|
||||
from langchain_core.outputs import LLMResult
|
||||
from langchain_core.messages import BaseMessage
|
||||
|
||||
|
||||
class AgentLensCallbackHandler(BaseCallbackHandler):
    """Callback handler for LangChain integration with AgentLens.

    This handler captures LLM calls, tool calls, and agent actions
    to provide observability for LangChain-based agents.

    NOTE(review): this is a stub — events are printed to stdout and
    ``trace_id`` is never assigned by any callback yet.
    """

    def __init__(self) -> None:
        # Identifier of the active trace. Currently always None; meant to
        # be populated once trace correlation is implemented.
        self.trace_id: Optional[str] = None

    def on_llm_start(
        self,
        serialized: Dict[str, Any],
        prompts: list[str],
        **kwargs: Any,
    ) -> None:
        """Called when an LLM starts processing.

        Args:
            serialized: Serialized representation of the LLM; only the
                ``name`` key is read here.
            prompts: Prompts sent to the LLM (not currently recorded).
        """
        print(f"[AgentLens] LLM started: {serialized.get('name', 'unknown')}")

    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Called when an LLM finishes processing."""
        # Plain string literal: nothing to interpolate (was an f-string
        # with no placeholders, ruff F541).
        print("[AgentLens] LLM completed")

    def on_llm_error(self, error: BaseException, **kwargs: Any) -> None:
        """Called when an LLM encounters an error.

        Args:
            error: The exception raised during the LLM call. Annotated as
                ``BaseException`` to match the langchain-core
                ``BaseCallbackHandler.on_llm_error`` signature.
        """
        print(f"[AgentLens] LLM error: {error}")

    def on_tool_start(
        self,
        serialized: Dict[str, Any],
        input_str: str,
        **kwargs: Any,
    ) -> None:
        """Called when a tool starts executing.

        Args:
            serialized: Serialized representation of the tool; only the
                ``name`` key is read here.
            input_str: String input passed to the tool (not currently
                recorded).
        """
        print(f"[AgentLens] Tool started: {serialized.get('name', 'unknown')}")

    def on_tool_end(self, output: str, **kwargs: Any) -> None:
        """Called when a tool finishes executing."""
        # Plain string literal: nothing to interpolate (was an f-string
        # with no placeholders, ruff F541).
        print("[AgentLens] Tool completed")

    def on_tool_error(self, error: BaseException, **kwargs: Any) -> None:
        """Called when a tool encounters an error.

        Args:
            error: The exception raised during tool execution. Annotated
                as ``BaseException`` to match the langchain-core
                ``BaseCallbackHandler.on_tool_error`` signature.
        """
        print(f"[AgentLens] Tool error: {error}")

    def on_agent_action(self, action: Any, **kwargs: Any) -> None:
        """Called when an agent performs an action.

        Args:
            action: The agent action; presumably a langchain-core
                ``AgentAction`` — only its ``tool`` attribute is read.
        """
        print(f"[AgentLens] Agent action: {action.tool}")
|
||||
39
packages/sdk-python/agentlens/integrations/openai.py
Normal file
39
packages/sdk-python/agentlens/integrations/openai.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""OpenAI integration for AgentLens."""
|
||||
|
||||
from typing import Any, Optional
|
||||
from functools import wraps
|
||||
|
||||
|
||||
def wrap_openai(client: Any) -> Any:
    """Enable AgentLens tracing on an OpenAI client.

    Replaces ``client.chat.completions.create`` in place with a wrapper
    that prints start/completion/error markers to stdout and delegates
    to the original method.

    Args:
        client: An OpenAI client exposing ``chat.completions.create``.

    Returns:
        The same client object, mutated so chat completions are traced.

    Example:
        import openai
        from agentlens.integrations.openai import wrap_openai

        client = openai.OpenAI(api_key="sk-...")
        traced_client = wrap_openai(client)

        response = traced_client.chat.completions.create(...)
    """
    inner_create = client.chat.completions.create

    @wraps(inner_create)
    def _create_with_tracing(*call_args: Any, **call_kwargs: Any) -> Any:
        print("[AgentLens] OpenAI chat completion started")
        try:
            result = inner_create(*call_args, **call_kwargs)
        except Exception as exc:
            print(f"[AgentLens] OpenAI error: {exc}")
            raise
        print("[AgentLens] OpenAI chat completion completed")
        return result

    # Monkey-patch the bound method; callers keep the same client object.
    client.chat.completions.create = _create_with_tracing
    return client
|
||||
Reference in New Issue
Block a user