feat: complete phase 5 -- error hardening, frontend, Docker, demo, docs

Backend:
- ConversationTracker: Protocol + PostgresConversationTracker for lifecycle tracking
- Error handler: ErrorCategory enum, classify_error(), with_retry() exponential backoff
- Wire PostgresAnalyticsRecorder + ConversationTracker into ws_handler
- Rate limiting (10 msg/10s per thread), edge case hardening
- Health endpoint GET /api/health, version 0.5.0
- Demo seed data script + sample OpenAPI spec

Frontend (all new):
- React Router with NavBar (Chat / Replay / Dashboard / Review)
- ReplayListPage + ReplayPage with ReplayTimeline component
- DashboardPage with MetricCard, range selector, zero-state
- ReviewPage for OpenAPI classification review
- ErrorBanner for WebSocket disconnect handling
- API client (api.ts) with typed fetch wrappers

Infrastructure:
- Frontend Dockerfile (multi-stage node -> nginx)
- nginx.conf with SPA routing + API/WS proxy
- docker-compose.yml with frontend service + healthchecks
- .env.example files (root + backend)

Documentation:
- README.md with quick start and architecture
- Agent configuration guide
- OpenAPI import guide
- Deployment guide
- Demo script

48 new tests, 449 total passing, 92.87% coverage
This commit is contained in:
Yaojia Wang
2026-03-31 21:20:06 +02:00
parent 38644594d2
commit 0e78e5b06b
44 changed files with 3397 additions and 169 deletions

View File

@@ -0,0 +1,153 @@
"""Seed script -- inserts sample conversations and analytics events for demo purposes.
Usage:
cd backend
python fixtures/demo_data.py
"""
from __future__ import annotations
import asyncio
import os
import sys
from datetime import datetime, timedelta, timezone
from pathlib import Path
# Make the backend package importable when this script is run directly
# from the repository (fixtures/ has no package context of its own).
sys.path.insert(0, str(Path(__file__).parent.parent))
import psycopg
# Target database DSN. Override via the DATABASE_URL environment variable;
# the default matches the local docker-compose development instance.
DATABASE_URL = os.environ.get(
    "DATABASE_URL",
    "postgresql://smart_support:dev_password@localhost:5432/smart_support",
)
# Demo conversations to insert. Keys map 1:1 to columns of the
# `conversations` table, except `minutes_ago`, which is an offset from
# "now" that seed() turns into started_at / last_activity timestamps.
# `resolution_type` of None marks a conversation that is still open
# (seed() then leaves ended_at NULL).
SAMPLE_CONVERSATIONS = [
    {
        "thread_id": "demo-thread-001",
        "agents_used": ["order_agent"],
        "turn_count": 3,
        "total_tokens": 1250,
        "total_cost_usd": 0.00375,
        "resolution_type": "resolved",
        "minutes_ago": 5,
    },
    {
        "thread_id": "demo-thread-002",
        "agents_used": ["order_agent", "refund_agent"],
        "turn_count": 6,
        "total_tokens": 3200,
        "total_cost_usd": 0.0096,
        "resolution_type": "resolved",
        "minutes_ago": 30,
    },
    {
        "thread_id": "demo-thread-003",
        "agents_used": ["general_agent"],
        "turn_count": 2,
        "total_tokens": 800,
        "total_cost_usd": 0.0024,
        "resolution_type": None,
        "minutes_ago": 60,
    },
    {
        "thread_id": "demo-thread-004",
        "agents_used": ["order_agent", "general_agent"],
        "turn_count": 8,
        "total_tokens": 4500,
        "total_cost_usd": 0.0135,
        "resolution_type": "escalated",
        "minutes_ago": 120,
    },
    {
        "thread_id": "demo-thread-005",
        "agents_used": ["refund_agent"],
        "turn_count": 4,
        "total_tokens": 2100,
        "total_cost_usd": 0.0063,
        "resolution_type": "resolved",
        "minutes_ago": 240,
    },
]
# Demo analytics events, keyed to the conversations above by thread_id.
# `tool_name` is only present for tool_call events; seed() reads the other
# optional keys with .get() and falls back to 0 / 0.0 / None.
SAMPLE_EVENTS = [
    {"thread_id": "demo-thread-001", "event_type": "message", "agent_name": "order_agent", "tokens_used": 400, "cost_usd": 0.0012, "success": True},
    {"thread_id": "demo-thread-001", "event_type": "tool_call", "agent_name": "order_agent", "tool_name": "get_order_status", "tokens_used": 0, "cost_usd": 0.0, "success": True},
    {"thread_id": "demo-thread-002", "event_type": "message", "agent_name": "order_agent", "tokens_used": 1600, "cost_usd": 0.0048, "success": True},
    {"thread_id": "demo-thread-002", "event_type": "message", "agent_name": "refund_agent", "tokens_used": 1600, "cost_usd": 0.0048, "success": True},
    {"thread_id": "demo-thread-002", "event_type": "tool_call", "agent_name": "refund_agent", "tool_name": "process_refund", "tokens_used": 0, "cost_usd": 0.0, "success": True},
    {"thread_id": "demo-thread-003", "event_type": "message", "agent_name": "general_agent", "tokens_used": 800, "cost_usd": 0.0024, "success": True},
    {"thread_id": "demo-thread-004", "event_type": "message", "agent_name": "order_agent", "tokens_used": 2000, "cost_usd": 0.006, "success": True},
    {"thread_id": "demo-thread-004", "event_type": "escalation", "agent_name": "general_agent", "tokens_used": 2500, "cost_usd": 0.0075, "success": False},
    {"thread_id": "demo-thread-005", "event_type": "message", "agent_name": "refund_agent", "tokens_used": 2100, "cost_usd": 0.0063, "success": True},
]
_INSERT_CONVERSATION = """
INSERT INTO conversations
(thread_id, started_at, last_activity, turn_count, agents_used,
total_tokens, total_cost_usd, resolution_type, ended_at)
VALUES
(%(thread_id)s, %(started_at)s, %(last_activity)s, %(turn_count)s,
%(agents_used)s, %(total_tokens)s, %(total_cost_usd)s,
%(resolution_type)s, %(ended_at)s)
ON CONFLICT (thread_id) DO NOTHING
"""
_INSERT_EVENT = """
INSERT INTO analytics_events
(thread_id, event_type, agent_name, tool_name, tokens_used, cost_usd, success)
VALUES
(%(thread_id)s, %(event_type)s, %(agent_name)s, %(tool_name)s,
%(tokens_used)s, %(cost_usd)s, %(success)s)
"""
async def seed() -> None:
    """Insert the sample conversations and analytics events for the demo.

    Idempotent: conversations are guarded by ON CONFLICT DO NOTHING, and
    analytics events are only inserted for conversations that were newly
    created during this run. Previously, events had no such guard, so
    re-running the script duplicated every row in analytics_events.

    Raises:
        psycopg.OperationalError: if the database at DATABASE_URL is
            unreachable.
    """
    now = datetime.now(tz=timezone.utc)
    async with await psycopg.AsyncConnection.connect(DATABASE_URL) as conn:
        print("Seeding conversations...")
        # Threads actually inserted this run; used below to skip their
        # events on a re-run (analytics_events has no unique key to lean on).
        inserted_threads: set[str] = set()
        for conv in SAMPLE_CONVERSATIONS:
            # Derive plausible timestamps from the relative `minutes_ago`
            # offset; assume roughly 2 minutes per conversation turn.
            started_at = now - timedelta(minutes=conv["minutes_ago"])
            last_activity = started_at + timedelta(minutes=conv["turn_count"] * 2)
            # Only resolved/escalated conversations have an end time.
            ended_at = last_activity if conv["resolution_type"] else None
            cur = await conn.execute(
                _INSERT_CONVERSATION,
                {
                    "thread_id": conv["thread_id"],
                    "started_at": started_at,
                    "last_activity": last_activity,
                    "turn_count": conv["turn_count"],
                    "agents_used": conv["agents_used"],
                    "total_tokens": conv["total_tokens"],
                    "total_cost_usd": conv["total_cost_usd"],
                    "resolution_type": conv["resolution_type"],
                    "ended_at": ended_at,
                },
            )
            # rowcount is 0 when ON CONFLICT DO NOTHING skipped the row.
            if cur.rowcount == 1:
                inserted_threads.add(conv["thread_id"])
                print(f"  Inserted conversation {conv['thread_id']}")
            else:
                print(f"  Skipped existing conversation {conv['thread_id']}")
        print("Seeding analytics events...")
        for event in SAMPLE_EVENTS:
            # Skip events for conversations that already existed, keeping
            # the whole script safe to run repeatedly.
            if event["thread_id"] not in inserted_threads:
                continue
            await conn.execute(
                _INSERT_EVENT,
                {
                    "thread_id": event["thread_id"],
                    "event_type": event["event_type"],
                    "agent_name": event.get("agent_name"),
                    "tool_name": event.get("tool_name"),
                    "tokens_used": event.get("tokens_used", 0),
                    "cost_usd": event.get("cost_usd", 0.0),
                    "success": event.get("success"),
                },
            )
            print(f"  Inserted event {event['event_type']} for {event['thread_id']}")
        await conn.commit()
        print("Done. Demo data seeded successfully.")
# Script entry point: `python fixtures/demo_data.py` (see module docstring).
if __name__ == "__main__":
    asyncio.run(seed())