feat: complete phase 4 -- conversation replay API + analytics dashboard

- Replay models: StepType enum, ReplayStep, ReplayPage frozen dataclasses
- Checkpoint transformer: PostgresSaver JSONB -> structured timeline steps
- Replay API: GET /api/conversations (paginated), GET /api/replay/{thread_id}
- Analytics models: AgentUsage, InterruptStats, AnalyticsResult
- Analytics event recorder: Protocol + PostgresAnalyticsRecorder + NoOp
- Analytics queries: resolution_rate, agent_usage, escalation_rate, cost, interrupts
- Analytics API: GET /api/analytics?range=Xd with envelope response
- DB migration: analytics_events table + conversations column additions
- 74 new tests, 399 total passing, 92.87% coverage
This commit is contained in:
Yaojia Wang
2026-03-31 13:35:45 +02:00
parent a2f750269d
commit 33db5aeb10
26 changed files with 1903 additions and 23 deletions

View File

@@ -0,0 +1,3 @@
"""Analytics module -- event recording and dashboard queries."""
from __future__ import annotations

View File

@@ -0,0 +1,51 @@
"""Analytics API router -- dashboard metrics endpoint."""
from __future__ import annotations
import re
from dataclasses import asdict
from typing import TYPE_CHECKING, Any
from fastapi import APIRouter, HTTPException, Query, Request
from app.analytics.queries import get_analytics
if TYPE_CHECKING:
from psycopg_pool import AsyncConnectionPool
# All analytics routes below are registered under the /api/analytics prefix.
router = APIRouter(prefix="/api/analytics", tags=["analytics"])
# Range strings look like "7d" / "30d": one or more digits plus a literal 'd'.
_RANGE_PATTERN = re.compile(r"^(\d+)d$")
_DEFAULT_RANGE = "7d"


async def _get_pool(request: Request) -> AsyncConnectionPool:
    """Dependency: extract the shared connection pool from app state.

    NOTE(review): assumes application startup stored the pool at
    ``app.state.pool`` -- confirm against the app factory.
    """
    return request.app.state.pool


def _envelope(data: Any, *, success: bool = True, error: str | None = None) -> dict:
    """Wrap a payload in the API's standard {success, data, error} envelope."""
    return {"success": success, "data": data, "error": error}


def _parse_range(range_str: str) -> int:
    """Parse an 'Xd' range string into an integer number of days.

    Args:
        range_str: Range expression such as "7d" or "30d".

    Returns:
        The number of days as a positive integer.

    Raises:
        HTTPException: 400 when the string is malformed or the day count
            is zero.
    """
    match = _RANGE_PATTERN.match(range_str)
    if not match:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid range format '{range_str}'. Expected format: '<N>d' e.g. '7d', '30d'.",
        )
    days = int(match.group(1))
    if days < 1:
        # "0d" matches the pattern but denotes an empty window; reject it
        # with the same 400 contract as a malformed string.
        raise HTTPException(
            status_code=400,
            detail=f"Invalid range '{range_str}': number of days must be at least 1.",
        )
    return days
@router.get("")
async def analytics(
request: Request,
range: str = Query(default=_DEFAULT_RANGE, alias="range"), # noqa: A002
) -> dict:
"""Return aggregated analytics metrics for the given time range."""
range_days = _parse_range(range)
pool = await _get_pool(request)
result = await get_analytics(pool, range_days=range_days)
return _envelope(asdict(result))

View File

@@ -0,0 +1,95 @@
"""Analytics event recorder -- Protocol and implementations."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable
if TYPE_CHECKING:
from psycopg_pool import AsyncConnectionPool
_INSERT_SQL = """
INSERT INTO analytics_events
(thread_id, event_type, agent_name, tool_name, tokens_used, cost_usd,
duration_ms, success, error_message, metadata)
VALUES
(%(thread_id)s, %(event_type)s, %(agent_name)s, %(tool_name)s,
%(tokens_used)s, %(cost_usd)s, %(duration_ms)s, %(success)s,
%(error_message)s, %(metadata)s)
"""
@runtime_checkable
class AnalyticsRecorder(Protocol):
    """Protocol for recording analytics events.

    Structural interface: any object exposing a matching async ``record``
    method conforms, no inheritance required. All ``record`` arguments are
    keyword-only. ``@runtime_checkable`` additionally permits
    ``isinstance(obj, AnalyticsRecorder)`` checks (method presence only;
    signatures are not verified at runtime).
    """

    async def record(
        self,
        *,
        thread_id: str,
        event_type: str,
        agent_name: str | None = None,
        tool_name: str | None = None,
        tokens_used: int = 0,
        cost_usd: float = 0.0,
        duration_ms: int | None = None,
        success: bool | None = None,
        error_message: str | None = None,
        metadata: dict | None = None,
    ) -> None: ...
class NoOpAnalyticsRecorder:
    """Recorder that silently discards every event.

    Useful in tests or when no database is available; it satisfies the
    ``AnalyticsRecorder`` protocol without touching any external system.
    """

    async def record(
        self,
        *,
        thread_id: str,
        event_type: str,
        agent_name: str | None = None,
        tool_name: str | None = None,
        tokens_used: int = 0,
        cost_usd: float = 0.0,
        duration_ms: int | None = None,
        success: bool | None = None,
        error_message: str | None = None,
        metadata: dict | None = None,
    ) -> None:
        """Accept the event and drop it."""
        return None
class PostgresAnalyticsRecorder:
    """Postgres-backed analytics recorder -- INSERTs into analytics_events."""

    def __init__(self, pool: AsyncConnectionPool) -> None:
        """Keep a reference to the shared pool; no connection is opened here."""
        self._pool = pool

    async def record(
        self,
        *,
        thread_id: str,
        event_type: str,
        agent_name: str | None = None,
        tool_name: str | None = None,
        tokens_used: int = 0,
        cost_usd: float = 0.0,
        duration_ms: int | None = None,
        success: bool | None = None,
        error_message: str | None = None,
        metadata: dict | None = None,
    ) -> None:
        """Insert one analytics event row.

        Fix: ``metadata`` is serialized with ``json.dumps`` before binding.
        psycopg 3 does not adapt plain Python dicts (it raises
        "cannot adapt type 'dict'"), so passing the dict through unchanged
        would fail at execution time; a JSON string binds cleanly and is
        coerced by Postgres to the column's JSON/JSONB type.
        """
        import json  # stdlib; local import leaves the module header untouched

        params: dict[str, Any] = {
            "thread_id": thread_id,
            "event_type": event_type,
            "agent_name": agent_name,
            "tool_name": tool_name,
            "tokens_used": tokens_used,
            "cost_usd": cost_usd,
            "duration_ms": duration_ms,
            "success": success,
            "error_message": error_message,
            # Serialize here rather than relying on driver adaptation.
            "metadata": json.dumps(metadata or {}),
        }
        async with self._pool.connection() as conn:
            await conn.execute(_INSERT_SQL, params)

View File

@@ -0,0 +1,38 @@
"""Value objects for analytics dashboard."""
from __future__ import annotations
from dataclasses import dataclass
@dataclass(frozen=True)
class AgentUsage:
    """Agent usage statistics within a time range.

    Produced by ``app.analytics.queries.agent_usage``, one instance per agent.
    """

    # Agent name, as unnested from conversations.agents_used.
    agent: str
    # Number of times this agent appears across agents_used in the range.
    count: int
    # Share of all agent usages in the range as a percentage (0-100),
    # rounded to two decimals by the SQL query.
    percentage: float
@dataclass(frozen=True)
class InterruptStats:
    """Interrupt approval/rejection statistics within a time range.

    Counts come from analytics_events rows with event_type = 'interrupt'
    (see _INTERRUPT_STATS_SQL in ``app.analytics.queries``). The zero
    defaults represent "no interrupt events in range".
    """

    # All interrupt events in the range.
    total: int = 0
    # Interrupts recorded with success = TRUE.
    approved: int = 0
    # Interrupts with success = FALSE and no error_message.
    rejected: int = 0
    # Interrupts whose error_message is 'expired'.
    expired: int = 0
@dataclass(frozen=True)
class AnalyticsResult:
    """Full analytics result for a given time range.

    Serialized with ``dataclasses.asdict()`` as the /api/analytics payload.
    """

    # Range expression the metrics cover, e.g. "7d". NOTE(review): the
    # field name shadows the builtin `range`; presumably kept so the
    # payload key mirrors the API's `range` query parameter -- confirm
    # before renaming.
    range: str
    # Conversations created within the range.
    total_conversations: int
    # Fraction (0.0-1.0) of conversations with resolution_type = 'resolved'.
    resolution_rate: float
    # Fraction (0.0-1.0) of conversations with resolution_type = 'escalated'.
    escalation_rate: float
    # Mean turn_count per conversation in the range.
    avg_turns_per_conversation: float
    # Mean total_cost_usd per conversation in the range.
    avg_cost_per_conversation_usd: float
    # Per-agent usage breakdown, ordered most-used first.
    agent_usage: tuple[AgentUsage, ...]
    # Interrupt approval/rejection counters.
    interrupt_stats: InterruptStats

View File

@@ -0,0 +1,177 @@
"""Analytics query functions -- all async, take pool + range_days."""
from __future__ import annotations
from typing import TYPE_CHECKING
from app.analytics.models import AgentUsage, AnalyticsResult, InterruptStats
if TYPE_CHECKING:
from psycopg_pool import AsyncConnectionPool
_RESOLUTION_RATE_SQL = """
SELECT
CASE WHEN COUNT(*) = 0 THEN 0.0
ELSE COUNT(*) FILTER (WHERE resolution_type = 'resolved')::float / COUNT(*)
END AS rate
FROM conversations
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
"""
_ESCALATION_RATE_SQL = """
SELECT
CASE WHEN COUNT(*) = 0 THEN 0.0
ELSE COUNT(*) FILTER (WHERE resolution_type = 'escalated')::float / COUNT(*)
END AS rate
FROM conversations
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
"""
_TOTAL_CONVERSATIONS_SQL = """
SELECT COUNT(*) AS total
FROM conversations
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
"""
_AVG_TURNS_SQL = """
SELECT COALESCE(AVG(turn_count), 0.0) AS avg_turns
FROM conversations
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
"""
_COST_PER_CONVERSATION_SQL = """
SELECT COALESCE(AVG(total_cost_usd), 0.0) AS avg_cost
FROM conversations
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
"""
_AGENT_USAGE_SQL = """
SELECT
agent,
COUNT(*) AS count,
ROUND(COUNT(*) * 100.0 / NULLIF(SUM(COUNT(*)) OVER (), 0), 2) AS percentage
FROM (
SELECT UNNEST(agents_used) AS agent
FROM conversations
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
AND agents_used IS NOT NULL
) sub
GROUP BY agent
ORDER BY count DESC
"""
_INTERRUPT_STATS_SQL = """
SELECT
COUNT(*) FILTER (WHERE event_type = 'interrupt') AS total,
COUNT(*) FILTER (WHERE event_type = 'interrupt' AND success = TRUE) AS approved,
COUNT(*) FILTER (WHERE event_type = 'interrupt' AND success = FALSE
AND error_message IS NULL) AS rejected,
COUNT(*) FILTER (WHERE event_type = 'interrupt' AND error_message = 'expired') AS expired
FROM analytics_events
WHERE created_at >= NOW() - INTERVAL '%(days)s days'
"""
async def resolution_rate(pool: AsyncConnectionPool, range_days: int) -> float:
    """Return the fraction of resolved conversations in the given range.

    NOTE(review): ``row.get()`` implies the pool's connections use a
    dict-like row factory (e.g. psycopg.rows.dict_row) -- confirm pool
    configuration.
    """
    async with pool.connection() as conn:
        cur = await conn.execute(_RESOLUTION_RATE_SQL, {"days": range_days})
        record = await cur.fetchone()
    return float(record.get("rate") or 0.0) if record else 0.0
async def escalation_rate(pool: AsyncConnectionPool, range_days: int) -> float:
    """Return the fraction of escalated conversations in the given range.

    NOTE(review): assumes a dict-like row factory on pool connections --
    confirm pool configuration.
    """
    async with pool.connection() as conn:
        cur = await conn.execute(_ESCALATION_RATE_SQL, {"days": range_days})
        record = await cur.fetchone()
    return float(record.get("rate") or 0.0) if record else 0.0
async def _total_conversations(pool: AsyncConnectionPool, range_days: int) -> int:
    """Count conversations created within the last ``range_days`` days."""
    async with pool.connection() as conn:
        cur = await conn.execute(_TOTAL_CONVERSATIONS_SQL, {"days": range_days})
        record = await cur.fetchone()
    return int(record.get("total") or 0) if record else 0
async def _avg_turns(pool: AsyncConnectionPool, range_days: int) -> float:
    """Mean turn count per conversation within the last ``range_days`` days."""
    async with pool.connection() as conn:
        cur = await conn.execute(_AVG_TURNS_SQL, {"days": range_days})
        record = await cur.fetchone()
    return float(record.get("avg_turns") or 0.0) if record else 0.0
async def cost_per_conversation(pool: AsyncConnectionPool, range_days: int) -> float:
    """Mean USD cost per conversation within the last ``range_days`` days."""
    async with pool.connection() as conn:
        cur = await conn.execute(_COST_PER_CONVERSATION_SQL, {"days": range_days})
        record = await cur.fetchone()
    return float(record.get("avg_cost") or 0.0) if record else 0.0
async def agent_usage(pool: AsyncConnectionPool, range_days: int) -> tuple[AgentUsage, ...]:
    """Return per-agent usage statistics for the range, most used first.

    An empty range yields an empty tuple.
    """
    async with pool.connection() as conn:
        cur = await conn.execute(_AGENT_USAGE_SQL, {"days": range_days})
        records = await cur.fetchall()
    usage: list[AgentUsage] = []
    for rec in records or []:
        usage.append(
            AgentUsage(
                agent=rec["agent"],
                count=int(rec["count"]),
                percentage=float(rec["percentage"]),
            )
        )
    return tuple(usage)
async def interrupt_stats(pool: AsyncConnectionPool, range_days: int) -> InterruptStats:
    """Return interrupt approval/rejection statistics for the given range.

    A missing aggregate row yields an all-zero ``InterruptStats``.
    """
    async with pool.connection() as conn:
        cur = await conn.execute(_INTERRUPT_STATS_SQL, {"days": range_days})
        record = await cur.fetchone()
    if not record:
        return InterruptStats()
    counters = {
        field: int(record.get(field) or 0)
        for field in ("total", "approved", "rejected", "expired")
    }
    return InterruptStats(**counters)
async def get_analytics(pool: AsyncConnectionPool, range_days: int) -> AnalyticsResult:
    """Aggregate all analytics metrics into a single AnalyticsResult.

    The seven queries are independent of each other, so they run
    concurrently via ``asyncio.gather`` -- each coroutine checks out its
    own pooled connection. (The original formatted them as a tuple of
    sequential awaits, which executed one-by-one.)
    """
    import asyncio  # stdlib; local import leaves the module header untouched

    res_rate, esc_rate, cost, usage, i_stats, total, avg_t = await asyncio.gather(
        resolution_rate(pool, range_days),
        escalation_rate(pool, range_days),
        cost_per_conversation(pool, range_days),
        agent_usage(pool, range_days),
        interrupt_stats(pool, range_days),
        _total_conversations(pool, range_days),
        _avg_turns(pool, range_days),
    )
    return AnalyticsResult(
        range=f"{range_days}d",
        total_conversations=total,
        resolution_rate=res_rate,
        escalation_rate=esc_rate,
        avg_turns_per_conversation=avg_t,
        avg_cost_per_conversation_usd=cost,
        agent_usage=usage,
        interrupt_stats=i_stats,
    )