feat: complete phase 3 -- OpenAPI auto-discovery, SSRF protection, tool generation
- SSRF protection: private IP blocking, DNS rebinding defense, redirect validation - OpenAPI fetcher with SSRF guard, JSON/YAML auto-detection, 10MB limit - Structural spec validator (3.0.x/3.1.x) - Endpoint parser with $ref resolution, auto-generated operation IDs - Heuristic + LLM endpoint classifier with Protocol interface - Review API at /api/openapi (import, job status, classification CRUD, approve) - @tool code generator + Agent YAML generator - Import orchestrator (fetch -> validate -> parse -> classify pipeline) - 125 new tests, 322 total passing, 93.23% coverage
This commit is contained in:
203
backend/tests/integration/test_import_pipeline.py
Normal file
203
backend/tests/integration/test_import_pipeline.py
Normal file
@@ -0,0 +1,203 @@
|
||||
"""Integration tests for the OpenAPI import pipeline orchestrator.
|
||||
|
||||
Tests the full pipeline: fetch -> validate -> parse -> classify.
|
||||
Uses mocked HTTP and mocked LLM classifier.
|
||||
|
||||
RED phase: written before implementation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from app.openapi.models import ImportJob
|
||||
|
||||
# Mark every test in this module as an integration test.
pytestmark = pytest.mark.integration

# Minimal-but-valid OpenAPI 3.0 document with two operations: a read-only
# GET /orders and a destructive DELETE /orders/{id} (the pipeline should
# count 2 endpoints and classify both).
_VALID_SPEC_JSON = """{
    "openapi": "3.0.0",
    "info": {"title": "Test API", "version": "1.0.0"},
    "paths": {
        "/orders": {
            "get": {
                "operationId": "list_orders",
                "summary": "List orders",
                "description": "Returns all orders",
                "responses": {"200": {"description": "OK"}}
            }
        },
        "/orders/{id}": {
            "delete": {
                "operationId": "delete_order",
                "summary": "Delete order",
                "description": "Deletes an order",
                "parameters": [
                    {"name": "id", "in": "path", "required": true, "schema": {"type": "string"}}
                ],
                "responses": {"204": {"description": "Deleted"}}
            }
        }
    }
}"""

# JSON that parses but is not an OpenAPI document; drives validation failures.
_INVALID_SPEC_JSON = '{"not": "a valid openapi spec"}'

# A public (non-private-range) address used when stubbing DNS resolution so
# the SSRF guard's private-IP checks let the fetch proceed.
_PUBLIC_IP = "93.184.216.34"
||||
@pytest.fixture
def mock_classifier():
    """Classifier stand-in for the pipeline.

    Uses the deterministic ``HeuristicClassifier`` rather than a real mock so
    no LLM calls happen during integration tests.
    """
    from app.openapi.classifier import HeuristicClassifier

    classifier = HeuristicClassifier()
    return classifier
||||
@pytest.fixture
def orchestrator(mock_classifier):
    """Build the ImportOrchestrator under test, wired to the test classifier."""
    from app.openapi.importer import ImportOrchestrator

    instance = ImportOrchestrator(classifier=mock_classifier)
    return instance
||||
class TestImportOrchestratorSuccess:
    """Happy-path flows through the fetch -> validate -> parse -> classify pipeline."""

    def _stub_valid_spec(self, httpx_mock, url: str) -> None:
        # Serve the valid spec fixture as JSON for the given URL.
        httpx_mock.add_response(
            url=url,
            text=_VALID_SPEC_JSON,
            headers={"content-type": "application/json"},
        )

    @pytest.mark.usefixtures("_mock_public_dns")
    async def test_full_pipeline_succeeds(self, orchestrator, httpx_mock) -> None:
        """A valid spec behind mocked HTTP runs the whole pipeline to completion."""
        spec_url = "https://example.com/api/spec.json"
        self._stub_valid_spec(httpx_mock, spec_url)

        job = await orchestrator.start_import(
            url=spec_url,
            job_id="test-job-1",
            on_progress=None,
        )

        assert isinstance(job, ImportJob)
        assert job.status == "done"
        assert job.job_id == "test-job-1"
        assert job.total_endpoints == 2
        assert job.classified_count == 2
        assert job.error_message is None

    @pytest.mark.usefixtures("_mock_public_dns")
    async def test_progress_callback_called_at_stages(self, orchestrator, httpx_mock) -> None:
        """Every pipeline stage reports itself through the on_progress hook."""
        spec_url = "https://example.com/api/spec.json"
        self._stub_valid_spec(httpx_mock, spec_url)
        observed: list[str] = []

        def record_stage(stage: str, job: ImportJob) -> None:
            observed.append(stage)

        await orchestrator.start_import(
            url=spec_url,
            job_id="test-job-2",
            on_progress=record_stage,
        )

        for expected_stage in ("fetching", "validating", "parsing", "classifying"):
            assert expected_stage in observed

    @pytest.mark.usefixtures("_mock_public_dns")
    async def test_none_progress_callback_does_not_raise(
        self, orchestrator, httpx_mock
    ) -> None:
        """A None progress callback is tolerated end to end."""
        spec_url = "https://example.com/api/spec.json"
        self._stub_valid_spec(httpx_mock, spec_url)

        job = await orchestrator.start_import(
            url=spec_url,
            job_id="test-job-3",
            on_progress=None,
        )

        assert job.status == "done"
||||
class TestImportOrchestratorFailures:
    """Error handling in the import pipeline.

    Each test drives the pipeline into a specific failure (SSRF rejection of a
    private address, or structural spec-validation failure) and checks that the
    resulting job records the failure cleanly instead of raising.
    """

    async def test_fetch_failure_sets_failed_status(self, orchestrator) -> None:
        """When HTTP fetch fails, job status is 'failed'."""
        # Resolve the hostname to a private address so the SSRF guard
        # rejects the fetch before any HTTP request is made.
        with patch("app.openapi.ssrf.resolve_hostname", return_value=["10.0.0.1"]):
            job = await orchestrator.start_import(
                url="http://internal.corp/spec.json",
                job_id="test-job-fail-1",
                on_progress=None,
            )
        assert job.status == "failed"
        assert job.error_message is not None

    @pytest.mark.usefixtures("_mock_public_dns")
    async def test_validation_failure_sets_failed_status(
        self, orchestrator, httpx_mock
    ) -> None:
        """When spec validation fails, job status is 'failed'."""
        httpx_mock.add_response(
            url="https://example.com/api/bad.json",
            text=_INVALID_SPEC_JSON,
            headers={"content-type": "application/json"},
        )
        job = await orchestrator.start_import(
            url="https://example.com/api/bad.json",
            job_id="test-job-fail-2",
            on_progress=None,
        )
        assert job.status == "failed"
        assert job.error_message is not None

    @pytest.mark.usefixtures("_mock_public_dns")
    async def test_error_message_is_descriptive(self, orchestrator, httpx_mock) -> None:
        """Error message contains useful context."""
        httpx_mock.add_response(
            url="https://example.com/api/bad.json",
            text=_INVALID_SPEC_JSON,
            headers={"content-type": "application/json"},
        )
        job = await orchestrator.start_import(
            url="https://example.com/api/bad.json",
            job_id="test-job-fail-3",
            on_progress=None,
        )
        # Guard against None before len(): a missing message should surface as
        # a clean assertion failure, not a TypeError. Also pin that the job
        # actually failed, so a false "done" can't slip past this test.
        assert job.status == "failed"
        assert job.error_message is not None
        assert len(job.error_message) > 0

    @pytest.mark.usefixtures("_mock_public_dns")
    async def test_failed_status_progress_called_with_failed(
        self, orchestrator, httpx_mock
    ) -> None:
        """When pipeline fails, on_progress is called with 'failed' stage."""
        httpx_mock.add_response(
            url="https://example.com/api/bad.json",
            text=_INVALID_SPEC_JSON,
            headers={"content-type": "application/json"},
        )
        stages_seen: list[str] = []

        def on_progress(stage: str, job: ImportJob) -> None:
            stages_seen.append(stage)

        await orchestrator.start_import(
            url="https://example.com/api/bad.json",
            job_id="test-job-fail-4",
            on_progress=on_progress,
        )
        assert "failed" in stages_seen
||||
@pytest.fixture
def _mock_public_dns():
    """Stub DNS resolution to a public IP so the SSRF guard permits fetching."""
    dns_patch = patch(
        "app.openapi.ssrf.resolve_hostname",
        return_value=[_PUBLIC_IP],
    )
    with dns_patch:
        yield
Reference in New Issue
Block a user