feat: complete phase 4 -- conversation replay API + analytics dashboard
- Replay models: StepType enum, ReplayStep, ReplayPage frozen dataclasses
- Checkpoint transformer: PostgresSaver JSONB -> structured timeline steps
- Replay API: GET /api/conversations (paginated), GET /api/replay/{thread_id}
- Analytics models: AgentUsage, InterruptStats, AnalyticsResult
- Analytics event recorder: Protocol + PostgresAnalyticsRecorder + NoOp (usage sketch below)
- Analytics queries: resolution_rate, agent_usage, escalation_rate, cost, interrupts
- Analytics API: GET /api/analytics?range=Xd with envelope response
- DB migration: analytics_events table + conversations column additions
- 74 new tests, 399 total passing, 92.87% coverage
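
A usage sketch for the recorder surface described above (illustrative only, not part of the diff; the import path is inferred from the backend/app layout, and the event_type and agent values are hypothetical):

    from app.analytics.event_recorder import AnalyticsRecorder

    async def on_agent_completed(recorder: AnalyticsRecorder, thread_id: str) -> None:
        # Call sites depend only on the Protocol, so tests can inject
        # NoOpAnalyticsRecorder while production wires PostgresAnalyticsRecorder.
        await recorder.record(
            thread_id=thread_id,
            event_type="agent_completed",  # hypothetical event type
            agent_name="billing",          # hypothetical agent name
            tokens_used=512,
            cost_usd=0.0031,
            duration_ms=840,
            success=True,
        )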
backend/app/analytics/event_recorder.py (new file, 95 lines)
@@ -0,0 +1,95 @@
"""Analytics event recorder -- Protocol and implementations."""

from __future__ import annotations

from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable

# psycopg 3 does not adapt plain dicts as query parameters; Jsonb wraps the
# metadata payload for the JSONB column.
from psycopg.types.json import Jsonb

if TYPE_CHECKING:
    from psycopg_pool import AsyncConnectionPool

_INSERT_SQL = """
INSERT INTO analytics_events
    (thread_id, event_type, agent_name, tool_name, tokens_used, cost_usd,
     duration_ms, success, error_message, metadata)
VALUES
    (%(thread_id)s, %(event_type)s, %(agent_name)s, %(tool_name)s,
     %(tokens_used)s, %(cost_usd)s, %(duration_ms)s, %(success)s,
     %(error_message)s, %(metadata)s)
"""


@runtime_checkable
class AnalyticsRecorder(Protocol):
    """Protocol for recording analytics events."""

    async def record(
        self,
        *,
        thread_id: str,
        event_type: str,
        agent_name: str | None = None,
        tool_name: str | None = None,
        tokens_used: int = 0,
        cost_usd: float = 0.0,
        duration_ms: int | None = None,
        success: bool | None = None,
        error_message: str | None = None,
        metadata: dict | None = None,
    ) -> None: ...


class NoOpAnalyticsRecorder:
    """No-op implementation for testing or when the DB is unavailable."""

    async def record(
        self,
        *,
        thread_id: str,
        event_type: str,
        agent_name: str | None = None,
        tool_name: str | None = None,
        tokens_used: int = 0,
        cost_usd: float = 0.0,
        duration_ms: int | None = None,
        success: bool | None = None,
        error_message: str | None = None,
        metadata: dict | None = None,
    ) -> None:
        """Do nothing."""


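# The Postgres-backed implementation below checks a connection out of the
# shared AsyncConnectionPool for each event; pool.connection() returns it to
# the pool on exit, so no connection is held between events.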
class PostgresAnalyticsRecorder:
    """Postgres-backed analytics recorder -- INSERTs into analytics_events."""

    def __init__(self, pool: AsyncConnectionPool) -> None:
        self._pool = pool

    async def record(
        self,
        *,
        thread_id: str,
        event_type: str,
        agent_name: str | None = None,
        tool_name: str | None = None,
        tokens_used: int = 0,
        cost_usd: float = 0.0,
        duration_ms: int | None = None,
        success: bool | None = None,
        error_message: str | None = None,
        metadata: dict | None = None,
    ) -> None:
        """Insert one analytics event row."""
        params: dict[str, Any] = {
            "thread_id": thread_id,
            "event_type": event_type,
            "agent_name": agent_name,
            "tool_name": tool_name,
            "tokens_used": tokens_used,
            "cost_usd": cost_usd,
            "duration_ms": duration_ms,
            "success": success,
            "error_message": error_message,
            # Wrap the dict so psycopg 3 can bind it to the JSONB column.
            "metadata": Jsonb(metadata or {}),
        }
        async with self._pool.connection() as conn:
            await conn.execute(_INSERT_SQL, params)
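
Because AnalyticsRecorder is @runtime_checkable, conformance can be checked structurally at runtime; a minimal test sketch (hypothetical, not one of the 74 tests added by this commit):

    from app.analytics.event_recorder import AnalyticsRecorder, NoOpAnalyticsRecorder

    def test_noop_satisfies_protocol() -> None:
        # isinstance() against a runtime_checkable Protocol checks method
        # presence only, not the full signature.
        assert isinstance(NoOpAnalyticsRecorder(), AnalyticsRecorder)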