feat: complete phase 4 -- conversation replay API + analytics dashboard

- Replay models: StepType enum, ReplayStep, ReplayPage frozen dataclasses
- Checkpoint transformer: PostgresSaver JSONB -> structured timeline steps
- Replay API: GET /api/conversations (paginated), GET /api/replay/{thread_id}
- Analytics models: AgentUsage, InterruptStats, AnalyticsResult
- Analytics event recorder: Protocol + PostgresAnalyticsRecorder + NoOp
- Analytics queries: resolution_rate, agent_usage, escalation_rate, cost, interrupts
- Analytics API: GET /api/analytics?range=Xd with envelope response
- DB migration: analytics_events table + conversations column additions
- 74 new tests, 399 total passing, 92.87% coverage
This commit is contained in:
Yaojia Wang
2026-03-31 13:35:45 +02:00
parent a2f750269d
commit 33db5aeb10
26 changed files with 1903 additions and 23 deletions

View File

@@ -0,0 +1,3 @@
"""Replay module -- conversation replay API and transformer."""
from __future__ import annotations

103
backend/app/replay/api.py Normal file
View File

@@ -0,0 +1,103 @@
"""Replay API router -- conversation listing and step-by-step replay."""
from __future__ import annotations
from typing import TYPE_CHECKING, Annotated, Any
from fastapi import APIRouter, HTTPException, Query, Request
if TYPE_CHECKING:
from psycopg_pool import AsyncConnectionPool
router = APIRouter(prefix="/api", tags=["replay"])
# Conversation listing, most recently active first. Values are bound via
# psycopg named placeholders, so they are parameterized server-side.
_LIST_CONVERSATIONS_SQL = """
SELECT thread_id, created_at, last_activity, status, total_tokens, total_cost_usd
FROM conversations
ORDER BY last_activity DESC
LIMIT %(limit)s OFFSET %(offset)s
"""
# All checkpoints for one thread. NOTE(review): ordering by checkpoint_id
# assumes the id is monotonically increasing in time -- confirm against the
# PostgresSaver schema (some checkpointers use UUIDs).
_GET_CHECKPOINTS_SQL = """
SELECT thread_id, checkpoint_id, checkpoint, metadata
FROM checkpoints
WHERE thread_id = %(thread_id)s
ORDER BY checkpoint_id ASC
"""
async def get_pool(request: Request) -> AsyncConnectionPool:
    """Resolve the shared connection pool from FastAPI application state.

    The pool is attached to ``app.state.pool`` elsewhere (at startup); this
    dependency just walks request -> app -> state to reach it.
    """
    app_state = request.app.state
    return app_state.pool
def _envelope(data: Any, *, success: bool = True, error: str | None = None) -> dict:
    """Wrap *data* in the standard ``{success, data, error}`` API envelope."""
    body: dict = {}
    body["success"] = success
    body["data"] = data
    body["error"] = error
    return body
@router.get("/conversations")
async def list_conversations(
    request: Request,
    page: Annotated[int, Query(ge=1)] = 1,
    per_page: Annotated[int, Query(ge=1, le=100)] = 20,
) -> dict:
    """Return one page of conversations, most recently active first.

    Pagination is offset-based: ``page`` is 1-indexed and ``per_page`` is
    capped at 100 by the query validator.
    """
    pool = await get_pool(request)
    skip = per_page * (page - 1)
    async with pool.connection() as conn:
        cursor = await conn.execute(
            _LIST_CONVERSATIONS_SQL,
            {"limit": per_page, "offset": skip},
        )
        rows = await cursor.fetchall()
    # NOTE(review): dict(row) assumes the pool uses a mapping row factory
    # (e.g. psycopg dict_row) -- confirm at pool construction.
    conversations = [dict(r) for r in rows]
    return _envelope(conversations)
@router.get("/replay/{thread_id}")
async def get_replay(
    thread_id: str,
    request: Request,
    page: Annotated[int, Query(ge=1)] = 1,
    per_page: Annotated[int, Query(ge=1, le=100)] = 20,
) -> dict:
    """Return one page of replay steps for *thread_id*.

    Raises a 404 when the thread has no checkpoints. Requesting a page past
    the end of the timeline yields an empty ``steps`` list, not an error.
    """
    # Imported lazily so importing this router does not pull in the
    # transformer (and whatever it imports) at module load time.
    from app.replay.transformer import transform_checkpoints

    pool = await get_pool(request)
    async with pool.connection() as conn:
        cursor = await conn.execute(_GET_CHECKPOINTS_SQL, {"thread_id": thread_id})
        rows = await cursor.fetchall()
    if not rows:
        raise HTTPException(status_code=404, detail=f"Thread '{thread_id}' not found")

    all_steps = transform_checkpoints([dict(r) for r in rows])
    lo = (page - 1) * per_page
    window = all_steps[lo : lo + per_page]

    def serialize(s) -> dict:
        # Flatten a ReplayStep into the wire shape; the enum becomes its
        # string value so the payload is plain JSON.
        return {
            "step": s.step,
            "type": s.type.value,
            "timestamp": s.timestamp,
            "content": s.content,
            "agent": s.agent,
            "tool": s.tool,
            "params": s.params,
            "result": s.result,
            "reasoning": s.reasoning,
            "tokens": s.tokens,
            "duration_ms": s.duration_ms,
        }

    payload = {
        "thread_id": thread_id,
        "total_steps": len(all_steps),
        "page": page,
        "per_page": per_page,
        "steps": [serialize(s) for s in window],
    }
    return _envelope(payload)

View File

@@ -0,0 +1,52 @@
"""Value objects for conversation replay."""
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
class StepType(str, Enum):
    """Closed set of step kinds in a conversation replay timeline.

    Inherits ``str`` so members compare equal to -- and serialize as -- their
    string values. Each value mirrors its member name exactly.
    """
    user_message = "user_message"  # input typed by the end user ("human" messages)
    supervisor_routing = "supervisor_routing"  # routing decision step -- presumably emitted by a supervisor node; not produced by the transformer in this file
    tool_call = "tool_call"  # an "ai" message that requests a tool invocation
    tool_result = "tool_result"  # the payload returned by a tool
    agent_response = "agent_response"  # plain "ai" reply with no tool calls
    interrupt = "interrupt"  # human-in-the-loop interrupt -- TODO confirm semantics; not produced by the transformer in this file
@dataclass(frozen=True)
class ReplayStep:
    """A single step in a conversation replay.

    Only ``step``, ``type`` and ``timestamp`` are mandatory; the remaining
    fields are populated depending on the step type (tool name/params for
    tool calls, agent name for responses, and so on).
    """

    step: int
    type: StepType
    timestamp: str
    content: str = ""
    agent: str | None = None
    tool: str | None = None
    params: dict | None = None
    result: dict | None = None
    reasoning: str | None = None
    tokens: int | None = None
    duration_ms: int | None = None

    def __post_init__(self) -> None:
        # Shallow-copy the dict-valued fields so a caller holding the original
        # dict cannot mutate this (otherwise frozen) instance from outside.
        # object.__setattr__ is required because the dataclass is frozen.
        for field_name in ("params", "result"):
            current = getattr(self, field_name)
            if current is not None:
                object.__setattr__(self, field_name, dict(current))
@dataclass(frozen=True)
class ReplayPage:
    """A paginated page of replay steps for a conversation thread."""
    thread_id: str  # conversation/thread identifier
    total_steps: int  # steps across the whole conversation, not just this page
    page: int  # page number (the replay API treats pages as 1-indexed)
    per_page: int  # requested page size
    steps: tuple[ReplayStep, ...]  # immutable slice of the timeline for this page

View File

@@ -0,0 +1,116 @@
"""Transforms PostgresSaver checkpoint rows into ReplayStep list."""
from __future__ import annotations
import logging
from app.replay.models import ReplayStep, StepType
logger = logging.getLogger(__name__)
# Unix-epoch ISO-8601 sentinel used when a message carries no created_at value.
_EMPTY_TIMESTAMP = "1970-01-01T00:00:00Z"
def _extract_messages(row: dict) -> list[dict]:
    """Safely extract the messages list from one checkpoint row.

    Walks ``row["checkpoint"]["channel_values"]["messages"]``; any level that
    is missing, falsy, or of an unexpected type short-circuits to ``[]``.
    """
    node: object = row.get("checkpoint")
    for key in ("channel_values", "messages"):
        if not node or not isinstance(node, dict):
            return []
        node = node.get(key)
    if not node or not isinstance(node, list):
        return []
    return node
def _step_from_message(msg: dict, step_number: int) -> ReplayStep | None:
"""Convert a single message dict to a ReplayStep. Returns None for unknown types."""
msg_type = msg.get("type", "")
timestamp = msg.get("created_at") or _EMPTY_TIMESTAMP
content = msg.get("content") or ""
if isinstance(content, list):
# LangChain may encode content as a list of parts
content = " ".join(
part.get("text", "") if isinstance(part, dict) else str(part)
for part in content
)
if msg_type == "human":
return ReplayStep(
step=step_number,
type=StepType.user_message,
timestamp=timestamp,
content=content,
)
if msg_type == "ai":
tool_calls = msg.get("tool_calls") or []
if tool_calls:
first = tool_calls[0]
return ReplayStep(
step=step_number,
type=StepType.tool_call,
timestamp=timestamp,
content=content,
tool=first.get("name"),
params=dict(first.get("args") or {}),
)
return ReplayStep(
step=step_number,
type=StepType.agent_response,
timestamp=timestamp,
content=content,
agent=msg.get("name"),
)
if msg_type == "tool":
raw = content
result: dict | None = None
try:
import json
result = json.loads(raw)
except (ValueError, TypeError):
result = {"raw": raw}
return ReplayStep(
step=step_number,
type=StepType.tool_result,
timestamp=timestamp,
tool=msg.get("name"),
result=result,
)
logger.debug("Skipping unknown message type: %s", msg_type)
return None
def transform_checkpoints(rows: list[dict]) -> list[ReplayStep]:
    """Flatten checkpoint rows into a single ordered replay timeline.

    Steps are numbered sequentially from 1 across all rows. Rows or messages
    that fail to parse are logged and skipped so one bad record cannot abort
    the whole replay.
    """
    timeline: list[ReplayStep] = []
    next_number = 1
    for row in rows:
        try:
            extracted = _extract_messages(row)
        except Exception:  # noqa: BLE001 -- best-effort: keep replaying
            logger.exception("Error extracting messages from checkpoint row")
            extracted = []
        for message in extracted:
            try:
                converted = _step_from_message(message, next_number)
            except Exception:  # noqa: BLE001 -- one bad message must not kill the rest
                logger.exception("Error converting message to ReplayStep")
                continue
            if converted is None:
                continue
            timeline.append(converted)
            next_number += 1
    return timeline