refactor: fix architectural issues across frontend and backend
Address all architecture review findings.

P0 fixes:
- Add API key authentication for admin endpoints (analytics, replay, openapi) and WebSocket connections via ADMIN_API_KEY env var
- Add PostgreSQL-backed PgSessionManager and PgInterruptManager for multi-worker production deployments (in-memory defaults preserved)

P1 fixes:
- Implement actual tool generation in the OpenAPI approve_job endpoint using generate_tool_code() and generate_agent_yaml()
- Add missing clarification, interrupt_expired, and tool_result message handlers in the frontend ChatPage

P2 fixes:
- Replace monkey-patching on CompiledStateGraph with a typed GraphContext
- Replace the 9-param dispatch_message with a WebSocketContext dataclass
- Extract duplicate _envelope() into shared app/api_utils.py
- Replace the mutable module-level counter with crypto.randomUUID()
- Remove hardcoded mock data from ReviewPage; use api.ts wrappers
- Remove the `as any` type escape from ReplayPage

All 516 tests passing, 0 TypeScript errors.
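For context on the P0 auth change, a minimal sketch of what the require_admin_api_key dependency wired into the router below could look like. Only the ADMIN_API_KEY env var and the Depends() wiring are from this commit; the X-Admin-API-Key header name, status codes, and error wording are assumptions for illustration, not the actual contents of app/auth.py.

    # Hypothetical sketch of app/auth.py (not shown in this diff).
    # Assumes the key arrives in an "X-Admin-API-Key" request header.
    import hmac
    import os

    from fastapi import Header, HTTPException


    async def require_admin_api_key(
        x_admin_api_key: str | None = Header(default=None),
    ) -> None:
        expected = os.environ.get("ADMIN_API_KEY")
        if not expected:
            # Fail closed if the server is misconfigured.
            raise HTTPException(status_code=503, detail="Admin API key not configured")
        if x_admin_api_key is None or not hmac.compare_digest(x_admin_api_key, expected):
            raise HTTPException(status_code=401, detail="Invalid or missing admin API key")

Because the diff below attaches this dependency at the router level via dependencies=[Depends(require_admin_api_key)], every /api/openapi route requires the key without per-endpoint changes.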
@@ -15,15 +15,21 @@ import re
 import uuid
 from typing import Literal
 
-from fastapi import APIRouter, BackgroundTasks, HTTPException
+from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
 from pydantic import BaseModel, field_validator
 
+from app.auth import require_admin_api_key
+from app.openapi.generator import generate_agent_yaml, generate_tool_code
 from app.openapi.importer import ImportOrchestrator
 from app.openapi.models import ClassificationResult, ImportJob
 
 logger = logging.getLogger(__name__)
 
-router = APIRouter(prefix="/api/openapi", tags=["openapi"])
+router = APIRouter(
+    prefix="/api/openapi",
+    tags=["openapi"],
+    dependencies=[Depends(require_admin_api_key)],
+)
 
 # In-memory store: job_id -> job dict, guarded by async lock
 _job_store: dict[str, dict] = {}
@@ -235,11 +241,42 @@ async def update_classification(
 
 @router.post("/jobs/{job_id}/approve")
 async def approve_job(job_id: str) -> dict:
-    """Approve a job's classifications and trigger tool generation."""
+    """Approve a job's classifications and trigger tool generation.
+
+    Generates Python tool code for each classified endpoint and
+    produces an agent YAML configuration snippet.
+    """
     job = _job_store.get(job_id)
     if job is None:
         raise HTTPException(status_code=404, detail=f"Job '{job_id}' not found")
 
-    updated_job = {**job, "status": "approved"}
+    classifications: list[ClassificationResult] = job.get("classifications", [])
+    if not classifications:
+        raise HTTPException(
+            status_code=400,
+            detail="No classifications to approve. Import must complete first.",
+        )
+
+    base_url = job["spec_url"].rsplit("/", 1)[0]
+    generated_tools = []
+    for clf in classifications:
+        tool = generate_tool_code(clf, base_url)
+        generated_tools.append({
+            "function_name": tool.function_name,
+            "agent_group": clf.agent_group,
+            "code": tool.code,
+        })
+
+    agent_yaml = generate_agent_yaml(tuple(classifications), base_url)
+
+    updated_job = {
+        **job,
+        "status": "approved",
+        "generated_tools": generated_tools,
+        "agent_yaml": agent_yaml,
+    }
     _job_store[job_id] = updated_job
-    return _job_to_response(updated_job)
+
+    response = _job_to_response(updated_job)
+    response["generated_tools_count"] = len(generated_tools)
+    return response