diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index e070167d..58daf24d 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -14,6 +14,7 @@ from typing import Any import github +import httpx from github import GithubException from github.Commit import Commit from github.PullRequest import PullRequest @@ -42,6 +43,7 @@ ) from webhook_server.utils.context import WebhookContext, get_context from webhook_server.utils.github_repository_settings import ( + DEFAULT_BRANCH_PROTECTION, get_repository_github_app_api, ) from webhook_server.utils.helpers import ( @@ -733,6 +735,34 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: value="create-issue-for-new-pr", return_on_none=global_create_issue_for_new_pr, extra_dict=repository_config ) + # Read required_conversation_resolution from branch-protection config + _bp_key = "required_conversation_resolution" + _bp_raw_default = DEFAULT_BRANCH_PROTECTION[_bp_key] + if not isinstance(_bp_raw_default, bool): + raise TypeError( + f"DEFAULT_BRANCH_PROTECTION[{_bp_key!r}] must be bool, got {type(_bp_raw_default).__name__}" + ) + _bp_default: bool = _bp_raw_default + _global_bp: dict[str, Any] = self.config.get_value(value="branch-protection", return_on_none={}) + _global_bp = _global_bp if isinstance(_global_bp, dict) else {} + _repo_bp: dict[str, Any] = self.config.get_value( + value="branch-protection", return_on_none={}, extra_dict=repository_config + ) + _repo_bp = _repo_bp if isinstance(_repo_bp, dict) else {} + # Repository-level overrides global; default from DEFAULT_BRANCH_PROTECTION + self.required_conversation_resolution: bool = _bp_default + for _bp_scope, _bp_dict in [("global", _global_bp), ("repository", _repo_bp)]: + if _bp_key in _bp_dict: + _bp_val = _bp_dict[_bp_key] + if isinstance(_bp_val, bool): + self.required_conversation_resolution = _bp_val + else: + _log_prefix = getattr(self, "log_prefix", "") + 
self.logger.warning( + f"{_log_prefix} Invalid branch-protection.{_bp_key} value in {_bp_scope} config: " + f"{_bp_val!r} (expected bool), keeping current value: {self.required_conversation_resolution}" + ) + # Load labels configuration _global_labels = self.config.get_value("labels", return_on_none={}) global_labels_config: dict[str, Any] = _global_labels if isinstance(_global_labels, dict) else {} @@ -841,6 +871,110 @@ async def get_pull_request(self, number: int | None = None) -> PullRequest | Non self.logger.debug(f"{self.log_prefix} All PR lookup strategies exhausted, no PR found") return None + async def get_unresolved_review_threads(self, pr_number: int) -> list[dict[str, Any]]: + """Fetch unresolved review threads for a PR using GitHub GraphQL API. + + Paginates through all review threads and returns only those that are + unresolved (including outdated ones), with a URL link to the first comment. + + Args: + pr_number: The pull request number. + + Returns: + List of dicts with keys: path, line, url, isOutdated for each + unresolved thread. + + Raises: + httpx.HTTPStatusError: If the GraphQL request fails. + ValueError: If the GraphQL response contains errors or PR not found. 
+ """ + owner, repo = self.repository_full_name.split("/") + query = """ + query($owner: String!, $repo: String!, $prNumber: Int!, $cursor: String) { + repository(owner: $owner, name: $repo) { + pullRequest(number: $prNumber) { + reviewThreads(first: 100, after: $cursor) { + pageInfo { + hasNextPage + endCursor + } + nodes { + isResolved + isOutdated + comments(first: 1) { + nodes { + url + path + line + } + } + } + } + } + } + } + """ + unresolved_threads: list[dict[str, Any]] = [] + cursor: str | None = None + + async with httpx.AsyncClient() as client: + while True: + variables: dict[str, Any] = { + "owner": owner, + "repo": repo, + "prNumber": pr_number, + "cursor": cursor, + } + response = await client.post( + "https://api.github.com/graphql", + json={"query": query, "variables": variables}, + headers={ + "Authorization": f"Bearer {self.token}", + "Content-Type": "application/json", + }, + timeout=30.0, + ) + response.raise_for_status() + data = response.json() + + if "errors" in data: + raise ValueError(f"GraphQL errors: {data['errors']}") + + repo_data = data["data"]["repository"] + if repo_data is None: + raise ValueError(f"Repository {self.repository_full_name} not found or inaccessible") + + pr_data = repo_data["pullRequest"] + if pr_data is None: + raise ValueError(f"Pull request #{pr_number} not found in {self.repository_full_name}") + + review_threads = pr_data["reviewThreads"] + threads: list[dict[str, Any]] = review_threads["nodes"] + + for thread in threads: + if not thread["isResolved"]: + comments = thread.get("comments", {}).get("nodes", []) + first_comment = comments[0] if comments else {} + unresolved_threads.append({ + "path": first_comment.get("path"), + "line": first_comment.get("line"), + "url": first_comment.get("url"), + "isOutdated": thread["isOutdated"], + }) + + page_info = review_threads["pageInfo"] + if not page_info["hasNextPage"]: + break + + cursor = page_info["endCursor"] + if not cursor: + raise ValueError( + f"GitHub GraphQL 
pagination invariant broken for PR #{pr_number}: " + "hasNextPage=True with null endCursor" + ) + + return unresolved_threads + async def _get_last_commit(self, pull_request: PullRequest) -> Commit: _commits = await asyncio.to_thread(pull_request.get_commits) return list(_commits)[-1] diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 3b1743bb..a26eb4e7 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -1101,13 +1101,18 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: try: self.logger.info(f"{self.log_prefix} Check if {CAN_BE_MERGED_STR}.") await self.check_run_handler.set_check_in_progress(name=CAN_BE_MERGED_STR) - # Fetch check runs and statuses in parallel (2 API calls → 1 concurrent operation) - _check_runs, _statuses = await asyncio.gather( - asyncio.to_thread(lambda: list(self.github_webhook.last_commit.get_check_runs())), - asyncio.to_thread(lambda: list(self.github_webhook.last_commit.get_statuses())), - ) - last_commit_check_runs = _check_runs - last_commit_statuses = _statuses + # Fetch check runs, statuses, and optionally unresolved threads in parallel + _check_runs_task = asyncio.to_thread(lambda: list(self.github_webhook.last_commit.get_check_runs())) + _statuses_task = asyncio.to_thread(lambda: list(self.github_webhook.last_commit.get_statuses())) + _unresolved_threads: list[dict[str, Any]] = [] + + if self.github_webhook.required_conversation_resolution: + _threads_task = self.github_webhook.get_unresolved_review_threads(pr_number=pull_request.number) + last_commit_check_runs, last_commit_statuses, _unresolved_threads = await asyncio.gather( + _check_runs_task, _statuses_task, _threads_task + ) + else: + last_commit_check_runs, last_commit_statuses = await asyncio.gather(_check_runs_task, _statuses_task) self.logger.debug( f"{self.log_prefix} Fetched 
def format_unresolved_conversations(unresolved_threads: list[dict[str, Any]]) -> str:
    """Build the can-be-merged failure text for unresolved review threads.

    Extracted from ``check_if_can_be_merged`` so the message formatting is
    a pure, unit-testable function; the original built the string with
    repeated ``+=`` inside the merge-check method.  Output is identical:
    a header line plus one bullet line per thread.

    Args:
        unresolved_threads: Thread dicts as returned by
            ``get_unresolved_review_threads`` (keys: path, line, url,
            isOutdated).  Missing ``path``/``line`` keys fall back to
            ``"unknown"``/``"N/A"``; a missing/falsy ``url`` drops the
            parenthesized link.

    Returns:
        Multi-line failure text, each line terminated by ``\\n``.
    """
    lines = [f"PR has {len(unresolved_threads)} unresolved review conversation(s):\n"]
    for thread in unresolved_threads:
        path = thread.get("path", "unknown")
        line = thread.get("line", "N/A")
        url = thread.get("url")
        outdated = " (outdated)" if thread.get("isOutdated") else ""
        if url:
            lines.append(f" - {path}:{line}{outdated} ({url})\n")
        else:
            lines.append(f" - {path}:{line}{outdated}\n")
    return "".join(lines)
webhook_server.utils.constants import ( APPROVED_BY_LABEL_PREFIX, CAN_BE_MERGED_STR, @@ -56,6 +60,7 @@ def mock_github_webhook(self) -> Mock: } mock_webhook.logger = MagicMock() mock_webhook.log_prefix = "[TEST]" + mock_webhook.repository_full_name = "test-org/test-repo" mock_webhook.repository = Mock() mock_webhook.issue_url_for_welcome_msg = "welcome-message-url" mock_webhook.parent_committer = "test-user" @@ -78,12 +83,13 @@ def mock_github_webhook(self) -> Mock: mock_webhook.pre_commit = True mock_webhook.python_module_install = False mock_webhook.pypi = False - mock_webhook.token = "test-token" # pragma: allowlist secret + mock_webhook.token = TEST_GITHUB_TOKEN mock_webhook.auto_verify_cherry_picked_prs = True mock_webhook.last_commit = Mock() mock_webhook.ctx = None mock_webhook.enabled_labels = None # Default: all labels enabled mock_webhook.custom_check_runs = [] + mock_webhook.required_conversation_resolution = False return mock_webhook @pytest.fixture @@ -797,6 +803,528 @@ async def test_check_if_can_be_merged_approved( await pull_request_handler.check_if_can_be_merged(pull_request=mock_pull_request) mock_add_label.assert_awaited_once_with(pull_request=mock_pull_request, label=CAN_BE_MERGED_STR) + @staticmethod + @contextmanager + def _can_be_merged_patch_context( + pull_request_handler: PullRequestHandler, + mock_pull_request: Mock, + *, + required_conversation_resolution: bool, + unresolved_threads: list[dict[str, Any]] | None = None, + ) -> Generator[dict[str, AsyncMock]]: + """Shared patch context for check_if_can_be_merged tests. + + Args: + pull_request_handler: The handler under test. + mock_pull_request: Mock PR object. + required_conversation_resolution: Whether the feature is enabled. + unresolved_threads: Return value for get_unresolved_review_threads. 
+ """ + if unresolved_threads is None: + unresolved_threads = [] + + with ( + patch.object(mock_pull_request, "is_merged", new=Mock(return_value=False)), + patch.object(mock_pull_request, "mergeable", new=True), + patch.object(pull_request_handler, "_check_if_pr_approved", new=AsyncMock(return_value="")), + patch.object(pull_request_handler, "_check_labels_for_can_be_merged", return_value=""), + patch.object(pull_request_handler.labels_handler, "_add_label", new=AsyncMock()) as mock_add_label, + patch.object(pull_request_handler.labels_handler, "_remove_label", new=AsyncMock()) as mock_remove_label, + patch.object( + pull_request_handler.owners_file_handler, + "owners_data_for_changed_files", + _owners_data_coroutine(), + ), + patch.object(pull_request_handler.github_webhook, "minimum_lgtm", new=0), + patch.object( + pull_request_handler.github_webhook, + "required_conversation_resolution", + new=required_conversation_resolution, + ), + patch.object(pull_request_handler.check_run_handler, "set_check_in_progress", new=AsyncMock()), + patch.object( + pull_request_handler.check_run_handler, + "required_check_in_progress", + new=AsyncMock(return_value=("", [])), + ), + patch.object( + pull_request_handler.check_run_handler, + "required_check_failed_or_no_status", + new=AsyncMock(return_value=""), + ), + patch.object(pull_request_handler.labels_handler, "wip_or_hold_labels_exists", return_value=""), + patch.object( + pull_request_handler.labels_handler, "pull_request_labels_names", new=AsyncMock(return_value=[]) + ), + patch.object( + pull_request_handler.github_webhook, + "last_commit", + Mock(get_check_runs=Mock(return_value=[]), get_statuses=Mock(return_value=[])), + ), + patch.object( + pull_request_handler.github_webhook, + "get_unresolved_review_threads", + new=AsyncMock(return_value=unresolved_threads), + ) as mock_get_threads, + patch.object( + pull_request_handler.check_run_handler, "set_check_failure", new=AsyncMock() + ) as mock_set_check_failure, + ): + 
yield { + "add_label": mock_add_label, + "remove_label": mock_remove_label, + "get_threads": mock_get_threads, + "set_check_failure": mock_set_check_failure, + } + + @pytest.mark.asyncio + async def test_can_be_merged_conversation_resolution_disabled( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test that get_unresolved_review_threads is NOT called when feature is disabled.""" + with self._can_be_merged_patch_context( + pull_request_handler, mock_pull_request, required_conversation_resolution=False + ) as mocks: + await pull_request_handler.check_if_can_be_merged(pull_request=mock_pull_request) + mocks["add_label"].assert_awaited_once_with(pull_request=mock_pull_request, label=CAN_BE_MERGED_STR) + mocks["get_threads"].assert_not_awaited() + + @pytest.mark.asyncio + async def test_can_be_merged_no_unresolved_threads( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test that PR can be merged when conversation resolution is enabled but no unresolved threads.""" + with self._can_be_merged_patch_context( + pull_request_handler, mock_pull_request, required_conversation_resolution=True + ) as mocks: + await pull_request_handler.check_if_can_be_merged(pull_request=mock_pull_request) + mocks["add_label"].assert_awaited_once_with(pull_request=mock_pull_request, label=CAN_BE_MERGED_STR) + mocks["get_threads"].assert_awaited_once_with(pr_number=mock_pull_request.number) + + @pytest.mark.asyncio + async def test_can_be_merged_unresolved_threads_present( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test that PR cannot be merged when unresolved review threads exist.""" + unresolved_threads = [ + { + "path": "src/main.py", + "line": 42, + "url": "https://github.com/test-org/test-repo/pull/123#discussion_r100", + "isOutdated": False, + }, + { + "path": "src/utils.py", + "line": 10, + "url": 
"https://github.com/test-org/test-repo/pull/123#discussion_r101", + "isOutdated": False, + }, + ] + with self._can_be_merged_patch_context( + pull_request_handler, + mock_pull_request, + required_conversation_resolution=True, + unresolved_threads=unresolved_threads, + ) as mocks: + await pull_request_handler.check_if_can_be_merged(pull_request=mock_pull_request) + mocks["remove_label"].assert_awaited_once_with(pull_request=mock_pull_request, label=CAN_BE_MERGED_STR) + mocks["set_check_failure"].assert_awaited_once() + failure_output = mocks["set_check_failure"].call_args[1]["output"]["text"] + assert "2 unresolved review conversation(s)" in failure_output + assert "src/main.py:42" in failure_output + assert "https://github.com/test-org/test-repo/pull/123#discussion_r100" in failure_output + + @pytest.mark.asyncio + async def test_can_be_merged_multiple_unresolved_threads( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test that all unresolved threads are listed without truncation.""" + unresolved_threads = [ + { + "path": f"src/file{i}.py", + "line": i * 10, + "url": f"https://github.com/test-org/test-repo/pull/123#discussion_r{i}", + "isOutdated": False, + } + for i in range(7) + ] + with self._can_be_merged_patch_context( + pull_request_handler, + mock_pull_request, + required_conversation_resolution=True, + unresolved_threads=unresolved_threads, + ) as mocks: + await pull_request_handler.check_if_can_be_merged(pull_request=mock_pull_request) + mocks["remove_label"].assert_awaited_once_with(pull_request=mock_pull_request, label=CAN_BE_MERGED_STR) + mocks["set_check_failure"].assert_awaited_once() + failure_output = mocks["set_check_failure"].call_args[1]["output"]["text"] + assert "7 unresolved review conversation(s)" in failure_output + for i in range(7): + assert f"src/file{i}.py:{i * 10}" in failure_output + assert f"discussion_r{i}" in failure_output + + @pytest.mark.asyncio + async def 
test_get_unresolved_review_threads_filters_resolved( + self, pull_request_handler: PullRequestHandler + ) -> None: + """Test that resolved threads are filtered out.""" + mock_response_data = { + "data": { + "repository": { + "pullRequest": { + "reviewThreads": { + "pageInfo": {"hasNextPage": False, "endCursor": None}, + "nodes": [ + { + "isResolved": False, + "isOutdated": False, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r1", "path": "file1.py", "line": 10} + ] + }, + }, + { + "isResolved": True, + "isOutdated": False, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r2", "path": "file2.py", "line": 20} + ] + }, + }, + { + "isResolved": False, + "isOutdated": False, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r3", "path": "file3.py", "line": 30} + ] + }, + }, + ], + } + } + } + } + } + # Use Mock (not AsyncMock) for response since httpx response.json() and + # response.raise_for_status() are synchronous methods. 
+ mock_response = Mock() + mock_response.json.return_value = mock_response_data + mock_response.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + # Bind the real method to the mock object + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + result = await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + assert len(result) == 2 + assert result[0]["path"] == "file1.py" + assert result[0]["url"] == "https://github.com/test/pull/1#r1" + assert result[0]["isOutdated"] is False + assert result[1]["path"] == "file3.py" + assert result[1]["url"] == "https://github.com/test/pull/1#r3" + assert result[1]["isOutdated"] is False + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_filters_outdated( + self, pull_request_handler: PullRequestHandler + ) -> None: + """Test that outdated unresolved threads are included with isOutdated flag.""" + mock_response_data = { + "data": { + "repository": { + "pullRequest": { + "reviewThreads": { + "pageInfo": {"hasNextPage": False, "endCursor": None}, + "nodes": [ + { + "isResolved": False, + "isOutdated": False, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r1", "path": "file1.py", "line": 10} + ] + }, + }, + { + "isResolved": False, + "isOutdated": True, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r2", "path": "file2.py", "line": 20} + ] + }, + }, + { + "isResolved": False, + "isOutdated": False, + "comments": 
{ + "nodes": [ + {"url": "https://github.com/test/pull/1#r3", "path": "file3.py", "line": 30} + ] + }, + }, + ], + } + } + } + } + } + mock_response = Mock() + mock_response.json.return_value = mock_response_data + mock_response.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + result = await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + assert len(result) == 3 + assert result[0]["path"] == "file1.py" + assert result[0]["isOutdated"] is False + assert result[1]["path"] == "file2.py" + assert result[1]["isOutdated"] is True + assert result[2]["path"] == "file3.py" + assert result[2]["isOutdated"] is False + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_pagination(self, pull_request_handler: PullRequestHandler) -> None: + """Test that pagination fetches threads across multiple pages.""" + page1_data = { + "data": { + "repository": { + "pullRequest": { + "reviewThreads": { + "pageInfo": {"hasNextPage": True, "endCursor": "cursor_abc"}, + "nodes": [ + { + "isResolved": False, + "isOutdated": False, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r1", "path": "page1.py", "line": 1} + ] + }, + }, + ], + } + } + } + } + } + page2_data = { + "data": { + "repository": { + "pullRequest": { + "reviewThreads": { + "pageInfo": {"hasNextPage": False, "endCursor": None}, + "nodes": [ + { + "isResolved": False, + 
"isOutdated": False, + "comments": { + "nodes": [ + {"url": "https://github.com/test/pull/1#r2", "path": "page2.py", "line": 2} + ] + }, + }, + ], + } + } + } + } + } + + mock_response1 = Mock() + mock_response1.json.return_value = page1_data + mock_response1.raise_for_status = Mock() + + mock_response2 = Mock() + mock_response2.json.return_value = page2_data + mock_response2.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.side_effect = [mock_response1, mock_response2] + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + result = await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + assert len(result) == 2 + assert result[0]["path"] == "page1.py" + assert result[0]["isOutdated"] is False + assert result[1]["path"] == "page2.py" + assert result[1]["isOutdated"] is False + assert mock_client.post.call_count == 2 + # Verify second call used the cursor from first page + second_call_json = mock_client.post.call_args_list[1][1]["json"] + assert second_call_json["variables"]["cursor"] == "cursor_abc" + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_empty_comments(self, pull_request_handler: PullRequestHandler) -> None: + """Test that threads with empty comments arrays return None fields.""" + mock_response_data = { + "data": { + "repository": { + "pullRequest": { + "reviewThreads": { + "pageInfo": {"hasNextPage": False, "endCursor": None}, + "nodes": [ + { + "isResolved": False, + "isOutdated": False, + "comments": 
{"nodes": []}, + }, + ], + } + } + } + } + } + mock_response = Mock() + mock_response.json.return_value = mock_response_data + mock_response.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + result = await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + assert len(result) == 1 + assert result[0]["path"] is None + assert result[0]["line"] is None + assert result[0]["url"] is None + assert result[0]["isOutdated"] is False + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_api_error(self, pull_request_handler: PullRequestHandler) -> None: + """Test that HTTP errors propagate (fail-fast).""" + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + mock_response = Mock() + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "403 Forbidden", request=Mock(), response=Mock(status_code=403) + ) + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", 
return_value=mock_client): + with pytest.raises(httpx.HTTPStatusError): + await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_graphql_errors(self, pull_request_handler: PullRequestHandler) -> None: + """Test that GraphQL errors raise ValueError (fail-fast).""" + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + mock_response_data = {"errors": [{"message": "Field 'reviewThreads' doesn't exist on type 'PullRequest'"}]} + mock_response = Mock() + mock_response.json.return_value = mock_response_data + mock_response.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + with pytest.raises(ValueError, match="GraphQL errors"): + await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_pr_not_found(self, pull_request_handler: PullRequestHandler) -> None: + """Test that missing PR in response raises ValueError (fail-fast).""" + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + mock_response_data = {"data": {"repository": {"pullRequest": None}}} + mock_response = Mock() + 
mock_response.json.return_value = mock_response_data + mock_response.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + with pytest.raises(ValueError, match="Pull request #123 not found"): + await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + + @pytest.mark.asyncio + async def test_get_unresolved_review_threads_repo_not_found(self, pull_request_handler: PullRequestHandler) -> None: + """Test that null repository in response raises ValueError (fail-fast).""" + pull_request_handler.github_webhook.get_unresolved_review_threads = ( + GithubWebhook.get_unresolved_review_threads.__get__(pull_request_handler.github_webhook) + ) + pull_request_handler.github_webhook.repository_full_name = "test-org/test-repo" + pull_request_handler.github_webhook.token = TEST_GITHUB_TOKEN + + mock_response_data = {"data": {"repository": None}} + mock_response = Mock() + mock_response.json.return_value = mock_response_data + mock_response.raise_for_status = Mock() + + mock_client = AsyncMock() + mock_client.post.return_value = mock_response + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=False) + + with patch("webhook_server.libs.github_api.httpx.AsyncClient", return_value=mock_client): + with pytest.raises(ValueError, match="Repository test-org/test-repo not found or inaccessible"): + await pull_request_handler.github_webhook.get_unresolved_review_threads(pr_number=123) + @pytest.mark.asyncio async def test_check_if_pr_approved_no_labels(self, pull_request_handler: PullRequestHandler) -> None: with ( diff --git a/webhook_server/tests/test_schema_validator.py b/webhook_server/tests/test_schema_validator.py index 
eb5353a6..c00b578a 100644 --- a/webhook_server/tests/test_schema_validator.py +++ b/webhook_server/tests/test_schema_validator.py @@ -166,7 +166,12 @@ def _validate_single_repository(self, repo_name: str, repo_config: Any) -> None: self.errors.append(f"Repository '{repo_name}' field '{field}' must be a string") # Boolean fields - boolean_fields = ["verified-job", "pre-commit", "mask-sensitive-data", "auto-verify-cherry-picked-prs"] + boolean_fields = [ + "verified-job", + "pre-commit", + "mask-sensitive-data", + "auto-verify-cherry-picked-prs", + ] for field in boolean_fields: if field in repo_config and not isinstance(repo_config[field], bool): self.errors.append(f"Repository '{repo_name}' field '{field}' must be a boolean")