feat: initial commit — Billo Release Agent (LangGraph)
LangGraph-based release automation agent with: - PR discovery (webhook + polling) - AI code review via Claude Code CLI (subscription-based) - Auto-create Jira tickets for PRs without ticket ID - Jira ticket lifecycle management (code review -> staging -> done) - CI/CD pipeline trigger, polling, and approval gates - Slack interactive messages with approval buttons - Per-repo semantic versioning - PostgreSQL persistence (threads, staging, releases) - FastAPI API (webhooks, approvals, status, manual triggers) - Docker Compose deployment 1069 tests, 95%+ coverage.
This commit is contained in:
0
tests/services/__init__.py
Normal file
0
tests/services/__init__.py
Normal file
141
tests/services/test_pr_dedup.py
Normal file
141
tests/services/test_pr_dedup.py
Normal file
@@ -0,0 +1,141 @@
|
||||
"""Tests for services/pr_dedup.py. Written FIRST (TDD RED phase).
|
||||
|
||||
find_unprocessed_prs queries agent_threads to find which PRs have not yet
|
||||
been processed (no existing thread for that repo+pr_id combination).
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from release_agent.models.pr import PRInfo
|
||||
from release_agent.services.pr_dedup import find_unprocessed_prs
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers — fake async pool
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_pr(pr_id: str, repo_name: str = "my-repo") -> PRInfo:
    """Build a minimal active PRInfo for the given id/repo pair."""
    url = f"https://dev.azure.com/org/proj/_git/{repo_name}/pullrequest/{pr_id}"
    return PRInfo(
        pr_id=pr_id,
        pr_url=url,
        repo_name=repo_name,
        branch="refs/heads/feature/ALLPOST-100-fix",
        pr_title=f"PR {pr_id}",
        pr_status="active",
    )
|
||||
|
||||
|
||||
def _make_pool(existing_rows: list[tuple[str, str]]):
    """Return a fake async connection pool.

    existing_rows: list of (pr_id, repo_name) tuples representing already-processed PRs.
    """

    class _Cursor:
        """Async cursor stub: ignores queries, always returns the canned rows."""

        def __init__(self, rows):
            self._rows = rows

        async def __aenter__(self):
            return self

        async def __aexit__(self, *exc_info):
            pass

        async def execute(self, sql, params=None):
            # The SQL is irrelevant to these tests; fetchall yields fixed data.
            pass

        async def fetchall(self):
            return self._rows

    class _Conn:
        """Async connection stub that hands out cursors over the canned rows."""

        def __init__(self, rows):
            self._rows = rows

        async def __aenter__(self):
            return self

        async def __aexit__(self, *exc_info):
            pass

        def cursor(self):
            return _Cursor(self._rows)

    class _Pool:
        """Pool stub exposing the psycopg-style ``connection()`` entry point."""

        def __init__(self, rows):
            self._rows = rows

        def connection(self):
            return _Conn(self._rows)

    return _Pool(existing_rows)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# find_unprocessed_prs tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestFindUnprocessedPrs:
    """Behavioural tests for find_unprocessed_prs (repo+pr_id dedup)."""

    async def test_returns_all_when_none_processed(self) -> None:
        pending = [_make_pr("10"), _make_pr("20")]

        result = await find_unprocessed_prs(_make_pool([]), pending)

        assert len(result) == 2

    async def test_returns_empty_when_all_processed(self) -> None:
        pending = [_make_pr("10"), _make_pr("20")]
        # Existing rows are (pr_id, repo_name) pairs — both PRs already have threads.
        processed = [("10", "my-repo"), ("20", "my-repo")]

        result = await find_unprocessed_prs(_make_pool(processed), pending)

        assert result == []

    async def test_returns_only_unprocessed(self) -> None:
        pending = [_make_pr("10"), _make_pr("20"), _make_pr("30")]

        result = await find_unprocessed_prs(_make_pool([("10", "my-repo")]), pending)

        returned_ids = [pr.pr_id for pr in result]
        assert "10" not in returned_ids
        assert "20" in returned_ids
        assert "30" in returned_ids

    async def test_empty_input_returns_empty(self) -> None:
        result = await find_unprocessed_prs(_make_pool([]), [])

        assert result == []

    async def test_different_repos_not_confused(self) -> None:
        pr_a = _make_pr("10", repo_name="repo-a")
        pr_b = _make_pr("10", repo_name="repo-b")

        # Only repo-a/10 is processed; the same pr_id in repo-b is still new.
        result = await find_unprocessed_prs(_make_pool([("10", "repo-a")]), [pr_a, pr_b])

        assert len(result) == 1
        assert result[0].repo_name == "repo-b"

    async def test_returns_list_of_pr_info(self) -> None:
        result = await find_unprocessed_prs(_make_pool([]), [_make_pr("42")])

        assert all(isinstance(pr, PRInfo) for pr in result)

    async def test_preserves_pr_info_objects(self) -> None:
        result = await find_unprocessed_prs(_make_pool([]), [_make_pr("77")])

        assert result[0].pr_id == "77"
        assert result[0].repo_name == "my-repo"
|
||||
309
tests/services/test_pr_poller.py
Normal file
309
tests/services/test_pr_poller.py
Normal file
@@ -0,0 +1,309 @@
|
||||
"""Tests for services/pr_poller.py. Written FIRST (TDD RED phase).
|
||||
|
||||
Tests verify:
|
||||
- _synthesize_webhook_payload produces a valid payload dict
|
||||
- run_pr_poll_loop calls list_active_prs, dedup, then schedules graph for each unprocessed PR
|
||||
- Fake sleep is injected to avoid real waits
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from release_agent.models.pr import PRInfo
|
||||
from release_agent.services.pr_poller import _synthesize_webhook_payload, run_pr_poll_loop
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _make_pr(
    pr_id: str = "10",
    repo_name: str = "my-repo",
    branch: str = "refs/heads/feature/ALLPOST-100-fix",
    title: str = "Test PR",
    status: str = "active",
) -> PRInfo:
    """Build a PRInfo with sensible defaults; every field is overridable per test."""
    url = f"https://dev.azure.com/org/proj/_git/{repo_name}/pullrequest/{pr_id}"
    return PRInfo(
        pr_id=pr_id,
        pr_url=url,
        repo_name=repo_name,
        branch=branch,
        pr_title=title,
        pr_status=status,
    )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _synthesize_webhook_payload tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestSynthesizeWebhookPayload:
    """The synthesized payload must mirror the real webhook's shape."""

    def test_returns_dict(self) -> None:
        assert isinstance(_synthesize_webhook_payload(_make_pr()), dict)

    def test_has_resource_key(self) -> None:
        assert "resource" in _synthesize_webhook_payload(_make_pr())

    def test_resource_contains_pull_request_id(self) -> None:
        payload = _synthesize_webhook_payload(_make_pr(pr_id="42"))
        # The id is carried as an integer, even though PRInfo stores a string.
        assert payload["resource"]["pull_request_id"] == 42

    def test_resource_contains_repository_name(self) -> None:
        payload = _synthesize_webhook_payload(_make_pr(repo_name="backend-api"))
        assert payload["resource"]["repository"]["name"] == "backend-api"

    def test_resource_contains_title(self) -> None:
        payload = _synthesize_webhook_payload(_make_pr(title="My PR Title"))
        assert payload["resource"]["title"] == "My PR Title"

    def test_resource_contains_source_ref_name(self) -> None:
        branch = "refs/heads/feature/ALLPOST-200-test"
        payload = _synthesize_webhook_payload(_make_pr(branch=branch))
        assert payload["resource"]["source_ref_name"] == branch

    def test_resource_status_is_active(self) -> None:
        payload = _synthesize_webhook_payload(_make_pr(status="active"))
        assert payload["resource"]["status"] == "active"

    def test_event_type_is_pr_updated(self) -> None:
        assert "event_type" in _synthesize_webhook_payload(_make_pr())

    def test_subscription_id_present(self) -> None:
        assert "subscription_id" in _synthesize_webhook_payload(_make_pr())

    def test_different_prs_produce_different_payloads(self) -> None:
        first = _synthesize_webhook_payload(_make_pr(pr_id="1"))
        second = _synthesize_webhook_payload(_make_pr(pr_id="2"))
        assert first["resource"]["pull_request_id"] != second["resource"]["pull_request_id"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# run_pr_poll_loop tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestRunPrPollLoop:
    """Tests for run_pr_poll_loop.

    Each test drives exactly one poll iteration: a stub sleep function raises
    CancelledError the first time the loop tries to wait, so the loop performs
    one full pass (list -> dedup -> schedule) and then terminates.
    """

    async def _run_once(
        self,
        azdo,
        *,
        find_mock,
        schedule_fn,
        watched_repos,
        interval_seconds=30,
        sleep_calls=None,
    ):
        """Run run_pr_poll_loop for one iteration with `find_unprocessed_prs` patched.

        azdo: the AsyncMock Azure DevOps client under the loop.
        find_mock: replacement for release_agent.services.pr_poller.find_unprocessed_prs.
        schedule_fn: callable the loop should invoke per unprocessed PR.
        sleep_calls: optional list that records each interval passed to sleep.
        """

        async def fake_sleep(seconds: float) -> None:
            # Record the requested interval, then cancel to stop after one pass.
            if sleep_calls is not None:
                sleep_calls.append(seconds)
            raise asyncio.CancelledError

        with patch(
            "release_agent.services.pr_poller.find_unprocessed_prs",
            new=find_mock,
        ):
            with pytest.raises(asyncio.CancelledError):
                await run_pr_poll_loop(
                    azdo_client=azdo,
                    db_pool=MagicMock(),
                    watched_repos=watched_repos,
                    target_branch="refs/heads/develop",
                    interval_seconds=interval_seconds,
                    schedule_fn=schedule_fn,
                    sleep_fn=fake_sleep,
                )

    async def test_calls_list_active_prs_for_each_repo(self) -> None:
        azdo = AsyncMock()
        azdo.list_active_prs = AsyncMock(return_value=[])

        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[]),
            schedule_fn=MagicMock(),
            watched_repos=["repo-a", "repo-b"],
        )

        assert azdo.list_active_prs.call_count == 2

    async def test_calls_find_unprocessed_prs(self) -> None:
        pr = _make_pr(pr_id="10")
        azdo = AsyncMock()
        azdo.list_active_prs = AsyncMock(return_value=[pr])
        find_mock = AsyncMock(return_value=[])

        await self._run_once(
            azdo,
            find_mock=find_mock,
            schedule_fn=MagicMock(),
            watched_repos=["my-repo"],
        )

        find_mock.assert_called_once()

    async def test_schedules_graph_for_each_unprocessed_pr(self) -> None:
        pr1 = _make_pr(pr_id="10")
        pr2 = _make_pr(pr_id="20")
        azdo = AsyncMock()
        azdo.list_active_prs = AsyncMock(return_value=[pr1, pr2])
        schedule_mock = MagicMock()

        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[pr1, pr2]),
            schedule_fn=schedule_mock,
            watched_repos=["my-repo"],
        )

        assert schedule_mock.call_count == 2

    async def test_does_not_schedule_already_processed_prs(self) -> None:
        pr = _make_pr(pr_id="10")
        azdo = AsyncMock()
        azdo.list_active_prs = AsyncMock(return_value=[pr])
        schedule_mock = MagicMock()

        # Dedup reports every PR as already processed.
        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[]),
            schedule_fn=schedule_mock,
            watched_repos=["my-repo"],
        )

        schedule_mock.assert_not_called()

    async def test_sleeps_for_configured_interval(self) -> None:
        azdo = AsyncMock()
        azdo.list_active_prs = AsyncMock(return_value=[])
        sleep_calls: list[float] = []

        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[]),
            schedule_fn=MagicMock(),
            watched_repos=["my-repo"],
            interval_seconds=123,
            sleep_calls=sleep_calls,
        )

        assert sleep_calls[0] == 123

    async def test_handles_empty_watched_repos(self) -> None:
        azdo = AsyncMock()

        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[]),
            schedule_fn=MagicMock(),
            watched_repos=[],
        )

        azdo.list_active_prs.assert_not_called()

    async def test_schedule_fn_receives_synthesized_payload(self) -> None:
        pr = _make_pr(pr_id="55", repo_name="test-repo")
        azdo = AsyncMock()
        azdo.list_active_prs = AsyncMock(return_value=[pr])
        schedule_calls: list[dict] = []

        def schedule_mock(**kwargs) -> None:
            schedule_calls.append(kwargs)

        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[pr]),
            schedule_fn=schedule_mock,
            watched_repos=["test-repo"],
        )

        assert len(schedule_calls) == 1
        initial_state = schedule_calls[0]["initial_state"]
        assert initial_state["webhook_payload"]["resource"]["pull_request_id"] == 55
        assert initial_state["pr_id"] == "55"
        assert initial_state["repo_name"] == "test-repo"

    async def test_continues_after_list_active_prs_error(self) -> None:
        azdo = AsyncMock()
        # First repo raises, second succeeds.
        azdo.list_active_prs = AsyncMock(side_effect=[Exception("API error"), []])
        sleep_calls: list[float] = []

        await self._run_once(
            azdo,
            find_mock=AsyncMock(return_value=[]),
            schedule_fn=MagicMock(),
            watched_repos=["repo-a", "repo-b"],
            sleep_calls=sleep_calls,
        )

        # Loop reached its sleep, i.e. the iteration completed despite the error.
        assert len(sleep_calls) == 1
|
||||
Reference in New Issue
Block a user