feat: initial monorepo scaffold - Next.js 15 + Prisma + Python SDK stubs

- Turborepo monorepo with apps/web and packages/database, sdk-python
- Next.js 15 app with professional landing page (dark theme, emerald accent)
- Prisma schema: Trace, DecisionPoint, Span, Event models with full indexing
- Docker Compose: web (port 4200), postgres:16, redis:7, migrate service
- Python SDK package stubs: init, trace decorator, log_decision, integrations
- Multi-stage Dockerfile for standalone Next.js production build
This commit is contained in:
Vectry
2026-02-09 22:46:16 +00:00
parent 572fd7e234
commit 9264866d1f
31 changed files with 3244 additions and 0 deletions

View File

@@ -0,0 +1 @@
"""Integration packages for AgentLens."""

View File

@@ -0,0 +1,55 @@
"""LangChain integration for AgentLens."""
from typing import Any, Dict, Optional, Sequence
from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.outputs import LLMResult
from langchain_core.messages import BaseMessage
class AgentLensCallbackHandler(BaseCallbackHandler):
    """Callback handler for LangChain integration with AgentLens.

    This handler captures LLM calls, tool calls, and agent actions
    to provide observability for LangChain-based agents.

    NOTE(review): currently a stub — each hook only prints a diagnostic
    line; ``trace_id`` is reserved for correlating callbacks with an
    AgentLens trace but is never populated yet.
    """

    def __init__(self) -> None:
        # Reserved trace-correlation id; no callback below assigns it yet.
        self.trace_id: Optional[str] = None

    def on_llm_start(
        self,
        serialized: dict[str, Any],
        prompts: list[str],
        **kwargs: Any,
    ) -> None:
        """Called when an LLM starts processing.

        Args:
            serialized: Serialized representation of the LLM; only the
                ``name`` key is read here.
            prompts: Prompts sent to the LLM (currently unused).
        """
        print(f"[AgentLens] LLM started: {serialized.get('name', 'unknown')}")

    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Called when an LLM finishes processing."""
        # Plain literal: no placeholders, so no f-prefix needed (F541).
        print("[AgentLens] LLM completed")

    def on_llm_error(self, error: BaseException, **kwargs: Any) -> None:
        """Called when an LLM encounters an error.

        ``error`` is typed ``BaseException`` to match the LangChain
        ``BaseCallbackHandler`` signature (parameter types are
        contravariant in overrides); accepting the wider type is
        backward-compatible for all existing callers.
        """
        print(f"[AgentLens] LLM error: {error}")

    def on_tool_start(
        self,
        serialized: dict[str, Any],
        input_str: str,
        **kwargs: Any,
    ) -> None:
        """Called when a tool starts executing.

        Args:
            serialized: Serialized representation of the tool; only the
                ``name`` key is read here.
            input_str: Raw tool input (currently unused).
        """
        print(f"[AgentLens] Tool started: {serialized.get('name', 'unknown')}")

    def on_tool_end(self, output: str, **kwargs: Any) -> None:
        """Called when a tool finishes executing."""
        # Plain literal: no placeholders, so no f-prefix needed (F541).
        print("[AgentLens] Tool completed")

    def on_tool_error(self, error: BaseException, **kwargs: Any) -> None:
        """Called when a tool encounters an error.

        Widened to ``BaseException`` for the same contravariance reason
        as :meth:`on_llm_error`.
        """
        print(f"[AgentLens] Tool error: {error}")

    def on_agent_action(self, action: Any, **kwargs: Any) -> None:
        """Called when an agent performs an action.

        ``action`` is expected to carry a ``tool`` attribute (LangChain
        ``AgentAction``) — presumably always true for agent callbacks;
        verify against the LangChain callback dispatch.
        """
        print(f"[AgentLens] Agent action: {action.tool}")

View File

@@ -0,0 +1,39 @@
"""OpenAI integration for AgentLens."""
from typing import Any, Optional
from functools import wraps
def wrap_openai(client: Any) -> Any:
    """Wrap an OpenAI client to add AgentLens tracing.

    Replaces ``client.chat.completions.create`` in place with a traced
    variant and returns the same client object.

    Args:
        client: The OpenAI client to wrap.

    Returns:
        Wrapped OpenAI client with AgentLens tracing enabled.

    Example:
        import openai
        from agentlens.integrations.openai import wrap_openai
        client = openai.OpenAI(api_key="sk-...")
        traced_client = wrap_openai(client)
        response = traced_client.chat.completions.create(...)
    """
    # Keep a reference to the untraced callable so the wrapper can
    # delegate to it.
    inner = client.chat.completions.create

    @wraps(inner)
    def _with_tracing(*call_args: Any, **call_kwargs: Any) -> Any:
        print("[AgentLens] OpenAI chat completion started")
        try:
            result = inner(*call_args, **call_kwargs)
        except Exception as exc:
            # Log the failure, then let the caller see the original error.
            print(f"[AgentLens] OpenAI error: {exc}")
            raise
        print("[AgentLens] OpenAI chat completion completed")
        return result

    # Monkey-patch the traced variant onto the client and hand it back.
    client.chat.completions.create = _with_tracing
    return client